# Record the R session details (R version, platform, locale, loaded packages)
# so the rendered report documents the exact environment used.
sessionInfo()
## R version 3.5.2 (2018-12-20)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17763)
## 
## Matrix products: default
## 
## locale:
## [1] LC_COLLATE=English_United States.1252 
## [2] LC_CTYPE=English_United States.1252   
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C                          
## [5] LC_TIME=English_United States.1252    
## 
## attached base packages:
## [1] stats     graphics  grDevices utils     datasets  methods   base     
## 
## loaded via a namespace (and not attached):
##  [1] compiler_3.5.2  magrittr_1.5    tools_3.5.2     htmltools_0.3.6
##  [5] yaml_2.2.0      Rcpp_1.0.0      stringi_1.2.4   rmarkdown_1.11 
##  [9] knitr_1.21      stringr_1.3.1   xfun_0.4        digest_0.6.18  
## [13] evaluate_0.12

User Inputs

# Name of the response column to model/predict (supplied via knit parameters).
output.var <- params$output.var

# Response-transformation flags; at most one is set, driven by params$trans:
#   1 = absolute-value transform, 2 = log scale, 3 = normalized scale,
#   anything else = no transformation.
transform.abs <- FALSE
log.pred <- FALSE
norm.pred <- FALSE
if (params$trans == 1) {
  # BUG FIX: original read `transform.abs == TRUE` — a comparison whose
  # result was discarded, so the flag silently stayed FALSE. Assign instead.
  transform.abs <- TRUE
} else if (params$trans == 2) {
  log.pred <- TRUE
} else if (params$trans == 3) {
  norm.pred <- TRUE
} else {
  message("You have chosen no transformation")
}

# Pull the EDA switch and each algorithm-selection flag out of the knit
# parameters into top-level variables of the same name. The plain flags
# toggle the hand-rolled implementations; the `.caret` variants toggle
# the caret-based equivalents.
eda <- params$eda

for (flag in c("algo.forward", "algo.backward", "algo.stepwise",
               "algo.LASSO", "algo.LARS",
               "algo.forward.caret", "algo.backward.caret",
               "algo.stepwise.caret", "algo.LASSO.caret",
               "algo.LARS.caret")) {
  assign(flag, params[[flag]])
}

# Echo the full parameter list so the rendered report records the exact
# configuration used for this training/prediction run.
message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
str(params)
## List of 13
##  $ output.var         : chr "y3"
##  $ trans              : int 2
##  $ eda                : logi FALSE
##  $ algo.forward       : logi FALSE
##  $ algo.backward      : logi FALSE
##  $ algo.stepwise      : logi FALSE
##  $ algo.LASSO         : logi FALSE
##  $ algo.LARS          : logi FALSE
##  $ algo.forward.caret : logi TRUE
##  $ algo.backward.caret: logi TRUE
##  $ algo.stepwise.caret: logi TRUE
##  $ algo.LASSO.caret   : logi TRUE
##  $ algo.LARS.caret    : logi TRUE
# Setup Labels
# alt.scale.label.name = Alternate Scale variable name
#   - if predicting on log, then alt.scale is normal scale
#   - if predicting on normal scale, then alt.scale is log scale
# Setup labels: pick the response column name to train on (label.names) and
# record the same response's name on the "alternate" scale
# (alt.scale.label.name), used later for back-transformed comparisons:
#   - predicting on the log scale  -> alternate scale is the raw variable
#   - predicting on the norm scale -> alternate scale is the raw variable
#   - predicting on the raw scale  -> alternate scale is the log variable
if (log.pred) {
  label.names <- paste0("log.", output.var)
  alt.scale.label.name <- output.var
}
if (!log.pred & !norm.pred) {
  label.names <- output.var
  alt.scale.label.name <- paste0("log.", output.var)
}
if (norm.pred) {
  label.names <- paste0("norm.", output.var)
  alt.scale.label.name <- output.var
}

Prepare Data

Read and Clean Features

# Load the feature matrix from both the standard and high-precision CSV
# exports, then compare them; all.equal reports the mean relative difference
# for each column that differs (differences below are small rounding effects
# of the lower-precision export).
features = read.csv("../../Data/features.csv")
features.highprec = read.csv("../../Data/features_highprec.csv")
all.equal(features, features.highprec)
##  [1] "Component \"x11\": Mean relative difference: 0.001401482"     
##  [2] "Component \"stat9\": Mean relative difference: 0.0002946299"  
##  [3] "Component \"stat12\": Mean relative difference: 0.0005151515" 
##  [4] "Component \"stat13\": Mean relative difference: 0.001354369"  
##  [5] "Component \"stat18\": Mean relative difference: 0.0005141104" 
##  [6] "Component \"stat22\": Mean relative difference: 0.001135977"  
##  [7] "Component \"stat25\": Mean relative difference: 0.0001884615" 
##  [8] "Component \"stat29\": Mean relative difference: 0.001083691"  
##  [9] "Component \"stat36\": Mean relative difference: 0.00021513"   
## [10] "Component \"stat37\": Mean relative difference: 0.0004578125" 
## [11] "Component \"stat43\": Mean relative difference: 0.0003473684" 
## [12] "Component \"stat45\": Mean relative difference: 0.0002951699" 
## [13] "Component \"stat46\": Mean relative difference: 0.0009745763" 
## [14] "Component \"stat47\": Mean relative difference: 8.829902e-05" 
## [15] "Component \"stat55\": Mean relative difference: 0.001438066"  
## [16] "Component \"stat57\": Mean relative difference: 0.0001056911" 
## [17] "Component \"stat58\": Mean relative difference: 0.0004882261" 
## [18] "Component \"stat60\": Mean relative difference: 0.0002408377" 
## [19] "Component \"stat62\": Mean relative difference: 0.0004885106" 
## [20] "Component \"stat66\": Mean relative difference: 1.73913e-06"  
## [21] "Component \"stat67\": Mean relative difference: 0.0006265823" 
## [22] "Component \"stat73\": Mean relative difference: 0.003846154"  
## [23] "Component \"stat75\": Mean relative difference: 0.002334906"  
## [24] "Component \"stat83\": Mean relative difference: 0.0005628415" 
## [25] "Component \"stat86\": Mean relative difference: 0.0006104418" 
## [26] "Component \"stat94\": Mean relative difference: 0.001005115"  
## [27] "Component \"stat97\": Mean relative difference: 0.0003551913" 
## [28] "Component \"stat98\": Mean relative difference: 0.0006157635" 
## [29] "Component \"stat106\": Mean relative difference: 0.0008267717"
## [30] "Component \"stat109\": Mean relative difference: 0.0005121359"
## [31] "Component \"stat110\": Mean relative difference: 0.0007615527"
## [32] "Component \"stat111\": Mean relative difference: 0.001336134" 
## [33] "Component \"stat114\": Mean relative difference: 7.680492e-05"
## [34] "Component \"stat117\": Mean relative difference: 0.0002421784"
## [35] "Component \"stat122\": Mean relative difference: 0.0006521084"
## [36] "Component \"stat123\": Mean relative difference: 8.333333e-05"
## [37] "Component \"stat125\": Mean relative difference: 0.002385135" 
## [38] "Component \"stat130\": Mean relative difference: 0.001874016" 
## [39] "Component \"stat132\": Mean relative difference: 0.0003193182"
## [40] "Component \"stat135\": Mean relative difference: 0.0001622517"
## [41] "Component \"stat136\": Mean relative difference: 7.722008e-05"
## [42] "Component \"stat138\": Mean relative difference: 0.0009739953"
## [43] "Component \"stat143\": Mean relative difference: 0.0004845361"
## [44] "Component \"stat146\": Mean relative difference: 0.0005821596"
## [45] "Component \"stat148\": Mean relative difference: 0.0005366922"
## [46] "Component \"stat153\": Mean relative difference: 0.0001557522"
## [47] "Component \"stat154\": Mean relative difference: 0.001351916" 
## [48] "Component \"stat157\": Mean relative difference: 0.0005427928"
## [49] "Component \"stat162\": Mean relative difference: 0.002622951" 
## [50] "Component \"stat167\": Mean relative difference: 0.0005905172"
## [51] "Component \"stat168\": Mean relative difference: 0.0002791096"
## [52] "Component \"stat169\": Mean relative difference: 0.0004121827"
## [53] "Component \"stat170\": Mean relative difference: 0.0004705882"
## [54] "Component \"stat174\": Mean relative difference: 0.0003822894"
## [55] "Component \"stat179\": Mean relative difference: 0.0008286604"
## [56] "Component \"stat184\": Mean relative difference: 0.0007526718"
## [57] "Component \"stat187\": Mean relative difference: 0.0005122768"
## [58] "Component \"stat193\": Mean relative difference: 4.215116e-05"
## [59] "Component \"stat199\": Mean relative difference: 0.002155844" 
## [60] "Component \"stat203\": Mean relative difference: 0.0003738318"
## [61] "Component \"stat213\": Mean relative difference: 0.000667676" 
## [62] "Component \"stat215\": Mean relative difference: 0.0003997955"
# Preview the first rows of the standard-precision feature set.
head(features)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10      x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.05e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.03e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.06e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.47e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.01e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.07e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Preview the first rows of the high-precision feature set for side-by-side
# comparison with the standard export above.
head(features.highprec)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10          x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.050025e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.034518e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.062312e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.471887e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.010552e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.071662e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Use the high-precision feature set for the remainder of the analysis.
features <- features.highprec
# str(features)  # uncomment to inspect the feature table

Checking correlations to evaluate removal of redundant features

# Correlation matrix over the numeric feature columns only, rounded to 2
# decimals for display. vapply() instead of sapply() guarantees a logical
# selection vector even for degenerate (e.g. zero-column) inputs.
corr.matrix = round(cor(features[vapply(features, is.numeric, logical(1))]), 2)

# Keep only variables involved in at least one high pairwise correlation.
threshold = 0.6
corr.matrix.tmp = corr.matrix
diag(corr.matrix.tmp) = 0  # ignore the trivial self-correlation of 1
high.corr = apply(abs(corr.matrix.tmp) >= threshold, 1, any)
high.corr.matrix = corr.matrix.tmp[high.corr, high.corr]

DT::datatable(corr.matrix)
DT::datatable(high.corr.matrix)

Feature Names

# Feature names: every column of the feature table except the JobName key.
drops <- c('JobName')
feature.names <- colnames(features)
feature.names <- feature.names[!(feature.names %in% drops)]
# str(feature.names)

Read and Clean Labels

# Load the label file, keep only complete rows, then restrict to the job
# identifier plus the response variable selected via params (output.var).
labels <- read.csv("../../Data/labels.csv")
# str(labels)
labels <- labels[complete.cases(labels), ]
labels <- labels[, c("JobName", output.var)]
summary(labels)
##       JobName           y3        
##  Job_00001:   1   Min.   : 95.91  
##  Job_00002:   1   1st Qu.:118.29  
##  Job_00003:   1   Median :124.03  
##  Job_00004:   1   Mean   :125.40  
##  Job_00007:   1   3rd Qu.:131.06  
##  Job_00008:   1   Max.   :193.73  
##  (Other)  :6974

Merge Datasets

# Join features with labels on the JobName key, then drop the key column
# since it is an identifier, not a model input.
data <- merge(features, labels, by = 'JobName')
drops <- c('JobName')
data <- data[, !(colnames(data) %in% drops)]
# str(data)

Transformations

# Apply the label transformation selected via params$trans.
# NOTE(review): label.names and alt.scale.label.name are defined earlier in
# the document — presumably the transformed and original-scale label column
# names; confirm there.

# trans == 1: presumably the labels are on a dB-like scale — 10^(x/20)
# converts them to absolute amplitude (confirm against the data source).
if (transform.abs == TRUE){
  data[,label.names] = 10^(data[,label.names]/20)
  #data = filter(data, y3 < 1E7)
}
# trans == 2: model log10 of the label; drop the original-scale column so it
# cannot leak into the predictors.
if (log.pred == TRUE){
  data[label.names] = log(data[alt.scale.label.name],10)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}

t = NULL # initialize to NULL for other cases
# trans == 3: fit a bestNormalize transformation; the fitted object `t` is
# kept so predictions can be inverse-transformed later.
if (norm.pred == TRUE){
  t = bestNormalize::bestNormalize(data[[alt.scale.label.name]])
  data[label.names] = predict(t)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}
#str(data)

Remove NA Cases

data = data[complete.cases(data),]

Exploratory Data Analysis

Check correlation of Label with Features

# EDA: correlation of every feature column with the label column(s),
# rendered as an interactive table.
if (eda) {
  feature.cols <- dplyr::select(data, -one_of(label.names))
  label.cols <- dplyr::select_at(data, label.names)
  corr.to.label <- round(cor(feature.cols, label.cols), 4)
  DT::datatable(corr.to.label)
}

Multicollinearity - VIF

# EDA: variance inflation factors to check multicollinearity among the
# features, sorted so the worst offenders come first (top 10 shown).
# NOTE(review): select_at/arrange/desc are used unqualified — assumes dplyr
# is attached earlier in the document (line above uses dplyr::); confirm.
if (eda == TRUE){
  vifDF = usdm::vif(select_at(data,feature.names)) %>% arrange(desc(VIF))
  head(vifDF,10)
}

Scatterplots

# Panel function for pairs(): draws a scaled histogram on the diagonal
# (adapted from the example in ?pairs).
panel.hist <- function(x, ...)
{
    # Temporarily stretch the y user-coordinates so the bars fit the panel;
    # on.exit() restores the original settings when the function returns.
    usr <- par("usr"); on.exit(par(usr))
    par(usr = c(usr[1:2], 0, 1.5))
    # Bin the data without plotting, then normalize counts to [0, 1].
    bins <- hist(x, plot = FALSE)
    edges <- bins$breaks
    n.edges <- length(edges)
    heights <- bins$counts / max(bins$counts)
    rect(edges[-n.edges], 0, edges[-1], heights, col = "cyan", ...)
}
# EDA: distribution of the (possibly transformed) label.
# NOTE(review): histogram() looks like lattice::histogram — confirm the
# package is attached earlier in the document.
if (eda) {
  histogram(data[, label.names])
  # hist(data[complete.cases(data), alt.scale.label.name])
}
# https://stackoverflow.com/questions/24648729/plot-one-numeric-variable-against-n-numeric-variables-in-n-plots
# Plot yvar against each xvar in its own base-graphics scatterplot.
#
# data  : data.frame holding all columns
# xvars : character vector of predictor column names; defaults to every
#         column except yvar
# yvar  : name of the response column
#
# Called for its plotting side effects; returns nothing useful.
ind.pairs.plot <- function(data, xvars=NULL, yvar)
{
    if (is.null(xvars)) {
        xvars <- names(data)[names(data) != yvar]
    }

    # seq_along() instead of 1:length(): an empty xvars draws nothing
    # rather than erroring on the reversed sequence 1:0.
    for (i in seq_along(xvars)) {
        plot(data[, xvars[i]], data[, yvar], xlab = xvars[i], ylab = yvar)
    }
}

# When EDA is enabled, draw one scatterplot of the label against each feature.
if (eda) {
  ind.pairs.plot(data, feature.names, label.names)
}

# Single ggplot scatter kept for reference:
# pl <- ggplot(data, aes(x=x18, y = y3))
# pl2 <- pl + geom_point(aes(alpha = 0.1)) # default color gradient based on 'hp'
# print(pl2)

Feature Engineering

# Feature engineering — runs only when EDA is off (i.e. on modeling runs).
if(eda ==FALSE){
  # x18 may need transformations
  # Compare the label against x18 and sqrt(x18) to judge the transform visually.
  plot(data[,'x18'], data[,label.names], main = "Original Scatter Plot vs. x18", ylab = label.names, xlab = 'x18')
  plot(sqrt(data[,'x18']), data[,label.names], main = "Original Scatter Plot vs. sqrt(x18)", ylab = label.names, xlab = 'sqrt(x18)')
  
  # transforming x18
  # Replace x18 with its square root as the model input.
  data$sqrt.x18 = sqrt(data$x18)
  data = dplyr::select(data,-one_of('x18'))
  
  # what about x7, x9?
  # x11 looks like data is at discrete points after a while. Will this be a problem?
}

Modeling

Train Test Split

# Shuffle the rows, then split 80/20 into train and test sets using
# caTools::sample.split on the label column.
# NOTE(review): no set.seed() before sampling, so the split differs between
# runs — confirm this is intended.
data <- data[sample(nrow(data)), ]
split <- sample.split(data[, label.names], SplitRatio = 0.8)

data.train <- subset(data, split == TRUE)
data.test <- subset(data, split == FALSE)

Common Functions

# Standard regression diagnostics for a fitted linear model: the base
# plot() diagnostics, studentized and standardized residual plots, a
# residual histogram with a standard-normal overlay, a leverage plot, and
# Cook's distance with the 4/n and 1 cutoffs.
#
# model : a fitted lm-style model
# train : the data the model was fit on (used for predictions and the 4/n
#         Cook's distance cutoff)
# Returns the vector of Cook's distances.
plot.diagnostics <- function(model, train) {
  plot(model)

  # (removed unused `residuals = resid(model)` — raw residuals are already
  # shown by plot(model) above)
  r.standard = rstandard(model)
  r.student = rstudent(model)

  # Studentized residuals vs fitted values
  plot(predict(model,train),r.student,
      ylab="Student Residuals", xlab="Predicted Values", 
      main="Student Residual Plot") 
  abline(0, 0)
  
  # Standardized residuals vs fitted values, with +/-2 reference bands
  plot(predict(model, train),r.standard,
      ylab="Standard Residuals", xlab="Predicted Values", 
      main="Standard Residual Plot") 
  abline(0, 0)
  abline(2, 0)
  abline(-2, 0)
  
  # Histogram of studentized residuals with a N(0,1) density overlay
  hist(r.student, freq=FALSE, main="Distribution of Studentized Residuals", 
  xlab="Studentized Residuals", ylab="Density", ylim=c(0,0.5))

  # Create range of x-values for normal curve
  xfit <- seq(min(r.student)-1, max(r.student)+1, length=40)

  # Generate values from the normal distribution at the specified values
  yfit <- (dnorm(xfit))

  # Add the normal curve
  lines(xfit, yfit, ylim=c(0,0.5))
  
  
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # Influential plots
  inf.meas = influence.measures(model)
  # print (summary(inf.meas)) # too much data
  
  # Leverage plot (hat values of the design matrix)
  lev = hat(model.matrix(model))
  plot(lev, ylab = 'Leverage - check')
  
  # Cook's Distance with horizontal cutoffs at 4/n and 1
  cd = cooks.distance(model)
  plot(cd,ylab="Cooks distances")
  abline(4/nrow(train),0)
  abline(1,0)
  
  print (paste("Number of data points that have Cook's D > 4/n: ", length(cd[cd > 4/nrow(train)]), sep = "")) 
  print (paste("Number of data points that have Cook's D > 1: ", length(cd[cd > 1]), sep = "")) 
  return(cd)
}

# function to set up random seeds
# with small changes from:
# http://jaehyeon-kim.github.io/2015/05/Setup-Random-Seeds-on-Caret-Package.html
#
# Builds the `seeds` list that caret::trainControl expects: one integer
# vector per resample (length = numbers + tune length), plus one final
# single seed for fitting on the complete data set.
# Returns NULL for methods other than "cv"/"repeatedcv" (caret then seeds
# itself).
setCaretSeeds <- function(method = "cv", numbers = 1, repeats = 1, tunes = NULL, seed = 1701) {
  # B is the number of resamples; each needs its own seed vector
  B <- if (method == "cv") numbers
  else if(method == "repeatedcv") numbers * repeats
  else NULL
  # BUG FIX: the original tested is.null(length) — `length` is the base
  # function and is never NULL, so unsupported methods fell through and
  # crashed in vector(length = NULL). Test B, the computed resample count.
  if(is.null(B)) {
    seeds <- NULL
  } else {
    set.seed(seed = seed)
    seeds <- vector(mode = "list", length = B)
    seeds <- lapply(seeds, function(x) sample.int(n = 1000000, size = numbers + ifelse(is.null(tunes), 0, tunes)))
    # final element: single seed for the last, complete-data model fit
    seeds[[length(seeds) + 1]] <- sample.int(n = 1000000, size = 1)
  }
  # return seeds
  seeds
}

# Train a linear-model feature-selection method through caret and produce
# diagnostic output (metric plots, residual plots, coefficients) for it.
#
# formula       : full model formula (the method selects a subset from it)
# data          : data.frame to train on
# method        : caret method name — 'leapForward', 'leapBackward',
#                 'leapSeq', 'glmnet' (with subopt = 'LASSO'), or 'lars'
# subopt        : sub-option qualifying `method` (only 'LASSO' is used)
# feature.names : candidate feature names (sizes the default nvmax grid)
# train.control : optional caret::trainControl; a seeded 10-fold CV control
#                 is built when NULL
# tune.grid     : optional tuning grid; a method-specific default is built
#                 when NULL
# pre.proc      : optional preProcess spec (the lars default forces
#                 center/scale)
#
# Returns a list with the fitted model, best-model id, and ggplot objects;
# the exact elements differ per method branch. Falls off the end (NULL)
# for unrecognized methods.
train.caret.glmselect = function(formula, data, method
                                 ,subopt = NULL, feature.names
                                 , train.control = NULL, tune.grid = NULL, pre.proc = NULL){
  
  # Default resampling: 10-fold CV with pre-generated seeds so results are
  # reproducible under parallel training.
  if(is.null(train.control)){
    train.control <- trainControl(method = "cv"
                              ,number = 10
                              ,seeds = setCaretSeeds(method = "cv"
                                                     , numbers = 10
                                                     , seed = 1701)
                              ,search = "grid"
                              ,verboseIter = TRUE
                              ,allowParallel = TRUE
                              )
  }
  
  # Method-specific default tuning grids.
  if(is.null(tune.grid)){
    # Subset selection: try every subset size up to the full feature count.
    if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
      tune.grid = data.frame(nvmax = 1:length(feature.names))
    }
    if (method == 'glmnet' && subopt == 'LASSO'){
      # Will only show 1 Lambda value during training, but that is OK
      # https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
      # Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
      lambda = 10^seq(-2,0, length =100)
      alpha = c(1)  # alpha = 1 selects pure LASSO in glmnet
      tune.grid = expand.grid(alpha = alpha,lambda = lambda)
    }
    if (method == 'lars'){
      # https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
      fraction = seq(0, 1, length = 100)
      tune.grid = expand.grid(fraction = fraction)
      pre.proc = c("center", "scale") 
    }
  }
  
  # Train on a parallel cluster, then always tear it down afterwards.
  # http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
  cl <- makeCluster(detectCores()*0.75) # use 75% of cores only, leave rest for other tasks
  registerDoParallel(cl)

  set.seed(1) 
  # note that the seed has to actually be set just before this function is called
  # setting it above does not ensure reproducibility for some reason
  model.caret <- caret::train(formula
                              , data = data
                              , method = method
                              , tuneGrid = tune.grid
                              , trControl = train.control
                              , preProc = pre.proc
                              )
  
  stopCluster(cl)
  registerDoSEQ() # register sequential engine in case you are not using this function anymore
  
  # ---- Reporting branch: subset-selection (leaps) methods ----
  if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    h4("All models results")
    print(model.caret$results) # all model results
    h4("Best Model")
    print(model.caret$bestTune) # best model
    
    model = model.caret$finalModel
    

    # Metrics Plot 
    # MAE / RMSE / R-squared as a function of subset size (nvmax).
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-nvmax) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=nvmax,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    # leap function does not support studentized residuals
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)
   
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)
    id = rownames(model.caret$bestTune)    
    # Provides the coefficients of the best model
    # regsubsets doesn't return a full model (see documentation of regsubsets),
    # so we need to recalculate the model
    # https://stackoverflow.com/questions/13063762/how-to-obtain-a-lm-object-from-regsubsets
    h4("Coefficients of final model:")
    coefs <- coef(model, id=id)
    # refit an lm on the selected terms to get the coefficient intervals
    nams <- names(coefs)
    nams <- nams[!nams %in% "(Intercept)"]
    response <-  as.character(formula[[2]])
    form <- as.formula(paste(response, paste(nams, collapse = " + "), sep = " ~ "))
    mod <- lm(form, data = data)
    #coefs
    #coef(mod)
    print(car::Confint(mod))
    return(list(model = model,id = id, residPlot = residPlot, residHistogram=residHistogram
                ,modelLM=mod))
  }
  # ---- Reporting branch: glmnet LASSO ----
  if (method == 'glmnet' && subopt == 'LASSO'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    print(model.caret$results)
    model=model.caret$finalModel
    # Metrics Plot 
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-lambda) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=lambda,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot 
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') +
      theme_light()
    plot(residHistogram)
    
    h4("Coefficients") 
    #no interval for glmnet https://stackoverflow.com/questions/39750965/confidence-intervals-for-ridge-regression
    
    # nonzero coefficients at the selected lambda
    t=coef(model,s=model.caret$bestTune$lambda)
    t[which(t[,1]!=0),]
    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, metricsPlot=metricsPlot ))
  }
  # ---- Reporting branch: LARS ----
  if (method == 'lars'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    # Metrics Plot
    dataPlot = model.caret$results %>%
        gather(key='metric',value='value',-fraction) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=fraction,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)
    
    h4("Coefficients") 
    # NOTE(review): `model` is never assigned in this branch (unlike the
    # glmnet branch), so this coef() call reads `model` from an enclosing
    # scope — likely a latent bug; confirm intent.
    t=coef(model,s=model.caret$bestTune$lambda)
    t[which(t[,1]>0),]
    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, residHistogram=residHistogram))
  }
}

# https://stackoverflow.com/questions/48265743/linear-model-subset-selection-goodness-of-fit-with-k-fold-cross-validation
# Predict from a leaps::regsubsets fit: build the full design matrix for
# `newdata` from `formula`, then multiply by the coefficients of the model
# of size `id` only. The formula is passed explicitly because
# object$call[[2]] does not reliably hold it.
predict.regsubsets <- function(object, newdata, id, formula, ...) {
    design <- model.matrix(formula, newdata)  # adds intercept, expands interactions
    beta <- coef(object, id = id)
    design[, names(beta)] %*% beta
}
  
# Evaluate a fitted model on held-out data: print a prediction summary and
# the test MSE, then plot predicted vs actual on the original label scale
# with +/- `good` and +/- `ok` tolerance lines.
#
# model          : fitted model object
# test           : held-out data.frame
# level          : confidence level for lm-style interval prediction
# draw.limits, good, ok : tolerance-band controls for the final plot
# method/subopt  : how the model was trained (NULL means a plain lm/glm);
#                  selects the matching predict() call
# id, formula    : best-subset id and full formula (leaps methods only)
# feature.names, label.names : column names in `test`
# transformation : fitted bestNormalize object used to inverse-transform
#                  when norm.pred is TRUE
#
# NOTE(review): reads the globals log.pred / norm.pred (set at the top of
# the document) to decide the label scale.
test.model = function(model, test, level=0.95
                      ,draw.limits = FALSE, good = 0.1, ok = 0.15
                      ,method = NULL, subopt = NULL
                      ,id = NULL, formula, feature.names, label.names
                      ,transformation = NULL){
  ## if using caret for glm select equivalent functionality, 
  ## need to pass formula (full is ok as it will select subset of variables from there)

  # BUG FIX: the original used independent if() statements, so with the
  # default method = NULL the later `method == '...'` comparisons produced
  # a zero-length condition and if() errored ("argument is of length
  # zero"). Chained else-if fixes that; identical() guards the subopt
  # comparison against subopt = NULL for the same reason.
  if (is.null(method)){
    pred = predict(model, newdata=test, interval="confidence", level = level) 
  } else if (method == 'leapForward' || method == 'leapBackward' || method == 'leapSeq'){
    pred = predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (method == 'glmnet' && identical(subopt, 'LASSO')){
    xtest = as.matrix(test[,feature.names]) 
    pred=as.data.frame(predict(model, xtest))
  } else if (method == 'lars'){
    pred=as.data.frame(predict(model, newdata = test))
  }
    
  # Summary of predicted values
  print ("Summary of predicted values: ")
  print(summary(pred[,1]))

  test.mse = mean((test[,label.names]-pred[,1])^2)
  print (paste(method, subopt, "Test MSE:", test.mse, sep=" "))
  
  if(log.pred == TRUE || norm.pred == TRUE){
    # plot transformed comparison first
    plot(test[,label.names],pred[,1],xlab = "Actual (Transformed)", ylab = "Predicted (Transformed)")
  }
    
  # Undo the label transformation so the final plot is on the original scale.
  if (log.pred == FALSE && norm.pred == FALSE){
    x = test[,label.names]
    y = pred[,1]
  }
  if (log.pred == TRUE){
    x = 10^test[,label.names]
    y = 10^pred[,1]  
  }
  if (norm.pred == TRUE){
    x = predict(transformation, test[,label.names], inverse = TRUE)
    y = predict(transformation, pred[,1], inverse = TRUE)
  }

  # Green lines bound +/- good, blue lines +/- ok, around the identity.
  plot(x, y, xlab = "Actual", ylab = "Predicted")
  abline(0,(1+good),col='green', lwd = 3)
  abline(0,(1-good),col='green', lwd = 3)
  abline(0,(1+ok),col='blue', lwd = 3)
  abline(0,(1-ok),col='blue', lwd = 3)
  
}

Setup Formulae

# Build the model formulae from the training-set column names: every column
# that is not a label becomes a predictor.
n <- names(data.train)
formula.lhs <- paste(n[n %in% label.names], collapse = " + ")
formula.rhs <- paste(n[!n %in% label.names], collapse = " + ")
formula <- as.formula(paste(formula.lhs, "~", formula.rhs))

# ind.interact = c("x4","x7","x8", "x9", "x10", "x11", "x14", "x16", "x17", "x21", "sqrt.x18")
# ind.nointeract = c("stat13", "stat14", "stat24", "stat60", "stat98", "stat110", "stat144", "stat149")
# 
# interact = paste(ind.interact, collapse = " + ")
# nointeract = paste(ind.nointeract, collapse = " + ")
# 
# # ^2 is 2 way interaction, ^3 is 3 way interaction
# formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " )^2 ", " + ", nointeract ))
# 
# # # * is all way interaction
# # formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " ) ", " + ", nointeract ))

# Intercept-only formula (grand mean), used as the null model.
grand.mean.formula <- as.formula(paste(formula.lhs, "~ 1"))

print(formula)
## log.y3 ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + 
##     x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + x22 + 
##     x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + stat7 + 
##     stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + stat14 + 
##     stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + stat21 + 
##     stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + stat28 + 
##     stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + stat35 + 
##     stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + stat42 + 
##     stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + stat49 + 
##     stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + stat56 + 
##     stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + stat63 + 
##     stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + stat70 + 
##     stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + stat77 + 
##     stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + stat84 + 
##     stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + stat91 + 
##     stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + stat98 + 
##     stat99 + stat100 + stat101 + stat102 + stat103 + stat104 + 
##     stat105 + stat106 + stat107 + stat108 + stat109 + stat110 + 
##     stat111 + stat112 + stat113 + stat114 + stat115 + stat116 + 
##     stat117 + stat118 + stat119 + stat120 + stat121 + stat122 + 
##     stat123 + stat124 + stat125 + stat126 + stat127 + stat128 + 
##     stat129 + stat130 + stat131 + stat132 + stat133 + stat134 + 
##     stat135 + stat136 + stat137 + stat138 + stat139 + stat140 + 
##     stat141 + stat142 + stat143 + stat144 + stat145 + stat146 + 
##     stat147 + stat148 + stat149 + stat150 + stat151 + stat152 + 
##     stat153 + stat154 + stat155 + stat156 + stat157 + stat158 + 
##     stat159 + stat160 + stat161 + stat162 + stat163 + stat164 + 
##     stat165 + stat166 + stat167 + stat168 + stat169 + stat170 + 
##     stat171 + stat172 + stat173 + stat174 + stat175 + stat176 + 
##     stat177 + stat178 + stat179 + stat180 + stat181 + stat182 + 
##     stat183 + stat184 + stat185 + stat186 + stat187 + stat188 + 
##     stat189 + stat190 + stat191 + stat192 + stat193 + stat194 + 
##     stat195 + stat196 + stat197 + stat198 + stat199 + stat200 + 
##     stat201 + stat202 + stat203 + stat204 + stat205 + stat206 + 
##     stat207 + stat208 + stat209 + stat210 + stat211 + stat212 + 
##     stat213 + stat214 + stat215 + stat216 + stat217 + sqrt.x18
print(grand.mean.formula)
## log.y3 ~ 1
# Refresh feature.names: earlier steps may have transformed/renamed features
feature.names <- n[!(n %in% label.names)]

Full Model

# Fit the full OLS model: every feature as a main effect
model.full <- lm(formula, data = data.train)
summary(model.full)
## 
## Call:
## lm(formula = formula, data = data.train)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.083900 -0.020733 -0.004694  0.016369  0.185484 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  1.970e+00  9.626e-03 204.682  < 2e-16 ***
## x1           1.121e-04  6.566e-04   0.171 0.864481    
## x2           1.089e-04  4.168e-04   0.261 0.793913    
## x3          -3.034e-05  1.148e-04  -0.264 0.791493    
## x4          -5.125e-05  9.053e-06  -5.660 1.59e-08 ***
## x5           2.265e-04  2.973e-04   0.762 0.446232    
## x6           3.921e-05  5.994e-04   0.065 0.947836    
## x7           1.126e-02  6.409e-04  17.572  < 2e-16 ***
## x8           4.714e-04  1.489e-04   3.166 0.001553 ** 
## x9           3.778e-03  3.332e-04  11.339  < 2e-16 ***
## x10          1.062e-03  3.106e-04   3.419 0.000633 ***
## x11          1.730e+05  7.388e+04   2.341 0.019250 *  
## x12         -2.696e-04  1.898e-04  -1.420 0.155618    
## x13          6.906e-05  7.539e-05   0.916 0.359661    
## x14         -1.612e-04  3.248e-04  -0.496 0.619732    
## x15          1.908e-05  3.096e-04   0.062 0.950861    
## x16          9.264e-04  2.155e-04   4.298 1.75e-05 ***
## x17          1.602e-03  3.267e-04   4.905 9.62e-07 ***
## x19          1.742e-04  1.652e-04   1.054 0.291806    
## x20         -8.032e-04  1.157e-03  -0.694 0.487692    
## x21          1.351e-04  4.261e-05   3.170 0.001530 ** 
## x22         -4.776e-04  3.457e-04  -1.381 0.167213    
## x23          2.350e-05  3.320e-04   0.071 0.943581    
## stat1       -8.418e-05  2.492e-04  -0.338 0.735550    
## stat2       -1.300e-05  2.469e-04  -0.053 0.958011    
## stat3        4.979e-04  2.502e-04   1.990 0.046648 *  
## stat4       -5.073e-04  2.500e-04  -2.029 0.042516 *  
## stat5       -1.910e-04  2.503e-04  -0.763 0.445404    
## stat6       -1.834e-04  2.507e-04  -0.732 0.464345    
## stat7       -7.256e-05  2.495e-04  -0.291 0.771198    
## stat8        4.581e-05  2.494e-04   0.184 0.854290    
## stat9       -2.055e-04  2.489e-04  -0.826 0.408982    
## stat10      -1.868e-04  2.497e-04  -0.748 0.454556    
## stat11      -1.669e-04  2.522e-04  -0.662 0.508169    
## stat12       2.106e-05  2.496e-04   0.084 0.932769    
## stat13      -4.939e-04  2.485e-04  -1.988 0.046889 *  
## stat14      -1.053e-03  2.487e-04  -4.234 2.33e-05 ***
## stat15      -2.516e-04  2.477e-04  -1.016 0.309844    
## stat16      -6.959e-05  2.490e-04  -0.280 0.779838    
## stat17      -2.138e-04  2.486e-04  -0.860 0.389895    
## stat18      -1.063e-04  2.472e-04  -0.430 0.667091    
## stat19       1.552e-04  2.473e-04   0.628 0.530187    
## stat20      -1.973e-04  2.496e-04  -0.791 0.429268    
## stat21       6.959e-05  2.503e-04   0.278 0.781006    
## stat22      -4.919e-04  2.503e-04  -1.965 0.049431 *  
## stat23       6.158e-04  2.478e-04   2.485 0.012969 *  
## stat24      -5.985e-04  2.493e-04  -2.401 0.016393 *  
## stat25      -5.530e-04  2.483e-04  -2.227 0.026014 *  
## stat26      -4.584e-04  2.497e-04  -1.836 0.066419 .  
## stat27       2.463e-05  2.508e-04   0.098 0.921754    
## stat28       3.815e-05  2.495e-04   0.153 0.878456    
## stat29       2.929e-05  2.524e-04   0.116 0.907635    
## stat30       3.519e-04  2.524e-04   1.394 0.163337    
## stat31       2.338e-05  2.526e-04   0.093 0.926249    
## stat32       7.814e-05  2.519e-04   0.310 0.756457    
## stat33      -3.078e-04  2.480e-04  -1.241 0.214587    
## stat34       7.805e-06  2.495e-04   0.031 0.975045    
## stat35      -3.102e-04  2.494e-04  -1.244 0.213484    
## stat36      -1.586e-04  2.473e-04  -0.641 0.521380    
## stat37      -3.598e-04  2.520e-04  -1.428 0.153385    
## stat38       4.852e-04  2.495e-04   1.944 0.051900 .  
## stat39      -2.292e-04  2.483e-04  -0.923 0.356130    
## stat40       1.690e-04  2.516e-04   0.672 0.501721    
## stat41      -5.534e-04  2.465e-04  -2.245 0.024809 *  
## stat42      -1.486e-04  2.481e-04  -0.599 0.549154    
## stat43      -3.791e-04  2.516e-04  -1.507 0.131868    
## stat44       5.657e-05  2.496e-04   0.227 0.820664    
## stat45      -2.918e-04  2.497e-04  -1.169 0.242602    
## stat46       3.127e-04  2.494e-04   1.254 0.209895    
## stat47       1.217e-04  2.512e-04   0.484 0.628167    
## stat48       4.141e-04  2.498e-04   1.658 0.097441 .  
## stat49       2.838e-04  2.478e-04   1.145 0.252070    
## stat50       2.363e-04  2.471e-04   0.956 0.339031    
## stat51       3.039e-04  2.482e-04   1.224 0.220890    
## stat52      -2.946e-04  2.499e-04  -1.179 0.238417    
## stat53      -3.461e-04  2.518e-04  -1.374 0.169427    
## stat54      -3.620e-04  2.509e-04  -1.443 0.149155    
## stat55      -4.409e-05  2.469e-04  -0.179 0.858272    
## stat56      -1.373e-04  2.496e-04  -0.550 0.582213    
## stat57       3.763e-05  2.450e-04   0.154 0.877910    
## stat58      -1.179e-05  2.477e-04  -0.048 0.962019    
## stat59       2.683e-04  2.490e-04   1.078 0.281173    
## stat60       3.901e-04  2.514e-04   1.552 0.120720    
## stat61      -2.056e-04  2.499e-04  -0.823 0.410655    
## stat62      -6.852e-05  2.483e-04  -0.276 0.782547    
## stat63       3.155e-04  2.507e-04   1.258 0.208370    
## stat64       5.490e-05  2.472e-04   0.222 0.824266    
## stat65      -3.509e-04  2.521e-04  -1.392 0.164114    
## stat66       2.206e-04  2.521e-04   0.875 0.381612    
## stat67       1.171e-04  2.495e-04   0.469 0.638751    
## stat68       2.500e-05  2.507e-04   0.100 0.920577    
## stat69       8.238e-05  2.491e-04   0.331 0.740841    
## stat70       2.228e-04  2.485e-04   0.897 0.369881    
## stat71      -4.478e-05  2.488e-04  -0.180 0.857174    
## stat72       3.243e-04  2.509e-04   1.293 0.196150    
## stat73       2.636e-04  2.506e-04   1.052 0.292886    
## stat74      -1.150e-04  2.498e-04  -0.460 0.645405    
## stat75      -3.086e-04  2.516e-04  -1.226 0.220103    
## stat76       1.773e-04  2.499e-04   0.709 0.478142    
## stat77      -7.566e-05  2.486e-04  -0.304 0.760859    
## stat78      -7.603e-05  2.482e-04  -0.306 0.759340    
## stat79      -3.214e-05  2.496e-04  -0.129 0.897554    
## stat80       1.123e-04  2.498e-04   0.449 0.653101    
## stat81       2.653e-04  2.499e-04   1.062 0.288331    
## stat82       2.737e-04  2.479e-04   1.104 0.269553    
## stat83      -1.478e-04  2.494e-04  -0.593 0.553448    
## stat84      -6.847e-05  2.499e-04  -0.274 0.784111    
## stat85      -3.452e-05  2.498e-04  -0.138 0.890099    
## stat86       5.179e-04  2.488e-04   2.082 0.037430 *  
## stat87      -2.982e-04  2.503e-04  -1.191 0.233535    
## stat88      -1.061e-04  2.481e-04  -0.428 0.668883    
## stat89      -1.821e-04  2.484e-04  -0.733 0.463666    
## stat90      -1.168e-04  2.511e-04  -0.465 0.641896    
## stat91      -1.819e-04  2.483e-04  -0.732 0.464021    
## stat92      -5.033e-04  2.508e-04  -2.007 0.044849 *  
## stat93      -3.352e-04  2.527e-04  -1.326 0.184804    
## stat94      -1.848e-04  2.490e-04  -0.742 0.458112    
## stat95       5.432e-05  2.491e-04   0.218 0.827401    
## stat96      -3.291e-04  2.478e-04  -1.328 0.184169    
## stat97       2.175e-04  2.485e-04   0.876 0.381326    
## stat98       3.391e-03  2.451e-04  13.835  < 2e-16 ***
## stat99       5.037e-04  2.514e-04   2.003 0.045186 *  
## stat100      6.571e-04  2.490e-04   2.639 0.008347 ** 
## stat101     -3.545e-04  2.510e-04  -1.412 0.157992    
## stat102     -1.440e-04  2.510e-04  -0.574 0.566021    
## stat103     -3.659e-04  2.518e-04  -1.453 0.146256    
## stat104     -1.654e-04  2.489e-04  -0.664 0.506448    
## stat105      2.219e-04  2.475e-04   0.896 0.370030    
## stat106     -2.275e-04  2.502e-04  -0.909 0.363238    
## stat107     -4.229e-05  2.499e-04  -0.169 0.865628    
## stat108     -1.001e-04  2.497e-04  -0.401 0.688584    
## stat109     -1.727e-04  2.491e-04  -0.693 0.488091    
## stat110     -3.240e-03  2.494e-04 -12.991  < 2e-16 ***
## stat111     -1.864e-04  2.489e-04  -0.749 0.453822    
## stat112     -3.867e-05  2.520e-04  -0.153 0.878040    
## stat113     -3.763e-04  2.500e-04  -1.505 0.132344    
## stat114      1.569e-04  2.492e-04   0.630 0.528827    
## stat115      2.206e-04  2.513e-04   0.878 0.380097    
## stat116      3.578e-05  2.512e-04   0.142 0.886728    
## stat117      1.864e-04  2.509e-04   0.743 0.457403    
## stat118     -3.651e-04  2.482e-04  -1.471 0.141337    
## stat119      2.310e-04  2.482e-04   0.931 0.352009    
## stat120      1.181e-04  2.486e-04   0.475 0.634829    
## stat121     -2.107e-04  2.483e-04  -0.849 0.396054    
## stat122     -1.253e-04  2.484e-04  -0.505 0.613891    
## stat123     -5.939e-05  2.523e-04  -0.235 0.813917    
## stat124     -2.720e-04  2.500e-04  -1.088 0.276647    
## stat125      9.406e-05  2.509e-04   0.375 0.707748    
## stat126      3.418e-04  2.480e-04   1.378 0.168239    
## stat127      1.173e-04  2.487e-04   0.472 0.637302    
## stat128     -2.721e-04  2.491e-04  -1.093 0.274599    
## stat129      7.438e-05  2.490e-04   0.299 0.765118    
## stat130      2.243e-04  2.514e-04   0.892 0.372446    
## stat131      2.694e-04  2.502e-04   1.077 0.281649    
## stat132     -5.806e-06  2.488e-04  -0.023 0.981386    
## stat133      1.065e-04  2.501e-04   0.426 0.670373    
## stat134     -1.200e-04  2.487e-04  -0.482 0.629493    
## stat135     -1.580e-04  2.488e-04  -0.635 0.525446    
## stat136      1.579e-04  2.512e-04   0.628 0.529772    
## stat137     -1.702e-05  2.482e-04  -0.069 0.945322    
## stat138      3.233e-05  2.491e-04   0.130 0.896726    
## stat139      1.006e-04  2.516e-04   0.400 0.689198    
## stat140      1.752e-06  2.484e-04   0.007 0.994374    
## stat141      2.216e-04  2.495e-04   0.888 0.374596    
## stat142     -6.611e-05  2.513e-04  -0.263 0.792480    
## stat143      2.938e-04  2.481e-04   1.184 0.236277    
## stat144      4.280e-04  2.486e-04   1.722 0.085162 .  
## stat145      8.108e-05  2.523e-04   0.321 0.747918    
## stat146     -5.864e-04  2.506e-04  -2.340 0.019299 *  
## stat147     -2.007e-04  2.528e-04  -0.794 0.427425    
## stat148     -3.326e-04  2.476e-04  -1.343 0.179264    
## stat149     -8.564e-04  2.522e-04  -3.396 0.000688 ***
## stat150     -1.803e-04  2.510e-04  -0.718 0.472717    
## stat151     -1.594e-04  2.519e-04  -0.633 0.526855    
## stat152     -1.278e-04  2.475e-04  -0.516 0.605672    
## stat153     -1.638e-04  2.532e-04  -0.647 0.517852    
## stat154      2.153e-05  2.518e-04   0.085 0.931885    
## stat155     -1.971e-04  2.474e-04  -0.797 0.425604    
## stat156      5.512e-04  2.517e-04   2.190 0.028578 *  
## stat157     -8.121e-05  2.480e-04  -0.327 0.743319    
## stat158     -1.488e-04  2.530e-04  -0.588 0.556584    
## stat159      2.773e-04  2.478e-04   1.119 0.263267    
## stat160      5.829e-05  2.497e-04   0.233 0.815450    
## stat161      3.538e-04  2.512e-04   1.408 0.159051    
## stat162      2.255e-04  2.469e-04   0.914 0.361021    
## stat163      1.945e-05  2.527e-04   0.077 0.938641    
## stat164      2.524e-04  2.516e-04   1.003 0.315841    
## stat165     -1.126e-04  2.474e-04  -0.455 0.649076    
## stat166     -4.385e-04  2.461e-04  -1.782 0.074837 .  
## stat167     -1.100e-04  2.493e-04  -0.441 0.659232    
## stat168     -8.399e-05  2.494e-04  -0.337 0.736271    
## stat169     -1.581e-05  2.488e-04  -0.064 0.949345    
## stat170     -3.184e-04  2.501e-04  -1.273 0.203046    
## stat171      1.216e-04  2.521e-04   0.482 0.629692    
## stat172      3.116e-04  2.489e-04   1.252 0.210630    
## stat173     -2.356e-04  2.520e-04  -0.935 0.349999    
## stat174     -6.264e-05  2.494e-04  -0.251 0.801696    
## stat175     -4.090e-04  2.494e-04  -1.640 0.101090    
## stat176      1.350e-04  2.495e-04   0.541 0.588596    
## stat177     -1.544e-04  2.500e-04  -0.618 0.536813    
## stat178     -1.502e-04  2.529e-04  -0.594 0.552738    
## stat179      1.209e-04  2.486e-04   0.486 0.626670    
## stat180     -3.721e-04  2.475e-04  -1.504 0.132743    
## stat181      2.363e-04  2.516e-04   0.939 0.347731    
## stat182     -2.225e-05  2.501e-04  -0.089 0.929110    
## stat183      2.652e-04  2.484e-04   1.068 0.285751    
## stat184      7.572e-05  2.507e-04   0.302 0.762609    
## stat185     -1.638e-04  2.476e-04  -0.662 0.508258    
## stat186     -2.366e-04  2.525e-04  -0.937 0.348780    
## stat187     -4.852e-04  2.489e-04  -1.949 0.051353 .  
## stat188     -7.384e-05  2.485e-04  -0.297 0.766414    
## stat189     -9.326e-05  2.498e-04  -0.373 0.708931    
## stat190      6.626e-05  2.489e-04   0.266 0.790078    
## stat191     -3.797e-04  2.506e-04  -1.515 0.129771    
## stat192      1.514e-04  2.521e-04   0.601 0.548139    
## stat193      1.131e-04  2.535e-04   0.446 0.655529    
## stat194      2.139e-05  2.466e-04   0.087 0.930872    
## stat195      3.255e-04  2.495e-04   1.305 0.191993    
## stat196     -3.154e-05  2.540e-04  -0.124 0.901180    
## stat197      2.034e-04  2.475e-04   0.822 0.411214    
## stat198     -2.453e-04  2.509e-04  -0.978 0.328286    
## stat199      3.718e-04  2.465e-04   1.508 0.131514    
## stat200     -4.652e-05  2.476e-04  -0.188 0.850945    
## stat201     -6.559e-05  2.494e-04  -0.263 0.792573    
## stat202     -5.160e-04  2.525e-04  -2.044 0.041020 *  
## stat203     -2.135e-05  2.487e-04  -0.086 0.931581    
## stat204     -5.174e-04  2.476e-04  -2.089 0.036715 *  
## stat205     -1.124e-04  2.492e-04  -0.451 0.652003    
## stat206     -2.150e-05  2.523e-04  -0.085 0.932115    
## stat207      4.829e-04  2.499e-04   1.933 0.053323 .  
## stat208      1.077e-04  2.505e-04   0.430 0.667310    
## stat209     -5.750e-05  2.489e-04  -0.231 0.817277    
## stat210     -2.754e-04  2.507e-04  -1.099 0.271988    
## stat211     -3.259e-04  2.489e-04  -1.309 0.190584    
## stat212      3.198e-05  2.488e-04   0.129 0.897727    
## stat213     -2.434e-04  2.507e-04  -0.971 0.331789    
## stat214     -3.181e-04  2.496e-04  -1.274 0.202581    
## stat215     -1.729e-04  2.511e-04  -0.689 0.491156    
## stat216     -1.718e-04  2.495e-04  -0.688 0.491238    
## stat217      4.361e-04  2.505e-04   1.741 0.081728 .  
## sqrt.x18     2.680e-02  9.543e-04  28.079  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.0316 on 5343 degrees of freedom
## Multiple R-squared:  0.2752, Adjusted R-squared:  0.2427 
## F-statistic: 8.455 on 240 and 5343 DF,  p-value: < 2.2e-16
cd.full = plot.diagnostics(model.full, data.train)

## [1] "Number of data points that have Cook's D > 4/n: 279"
## [1] "Number of data points that have Cook's D > 1: 0"

Checking with removal of high influence points

# Drop observations whose Cook's distance exceeds the 4/n rule of thumb,
# then refit the full model on the reduced training set.
high.cd <- names(cd.full[cd.full > 4 / nrow(data.train)])
data.train2 <- data.train[!(rownames(data.train) %in% high.cd), ]
model.full2 <- lm(formula, data = data.train2)
summary(model.full2)
## 
## Call:
## lm(formula = formula, data = data.train2)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.061130 -0.017579 -0.002424  0.016562  0.071696 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  1.957e+00  7.920e-03 247.062  < 2e-16 ***
## x1           1.124e-04  5.388e-04   0.209 0.834779    
## x2           3.904e-05  3.416e-04   0.114 0.909012    
## x3          -5.664e-05  9.380e-05  -0.604 0.545995    
## x4          -5.685e-05  7.428e-06  -7.654 2.31e-14 ***
## x5           3.404e-04  2.431e-04   1.400 0.161529    
## x6          -2.737e-04  4.913e-04  -0.557 0.577468    
## x7           1.217e-02  5.255e-04  23.168  < 2e-16 ***
## x8           5.240e-04  1.222e-04   4.287 1.85e-05 ***
## x9           3.639e-03  2.724e-04  13.362  < 2e-16 ***
## x10          1.379e-03  2.547e-04   5.414 6.46e-08 ***
## x11          2.179e+05  6.071e+04   3.589 0.000335 ***
## x12         -5.167e-05  1.549e-04  -0.333 0.738773    
## x13          1.066e-04  6.190e-05   1.722 0.085098 .  
## x14         -1.137e-04  2.659e-04  -0.428 0.668889    
## x15          1.198e-04  2.539e-04   0.472 0.637249    
## x16          9.677e-04  1.766e-04   5.479 4.49e-08 ***
## x17          1.553e-03  2.681e-04   5.790 7.45e-09 ***
## x19          1.251e-04  1.352e-04   0.926 0.354728    
## x20         -6.654e-06  9.509e-04  -0.007 0.994417    
## x21          1.493e-04  3.492e-05   4.274 1.95e-05 ***
## x22         -5.424e-04  2.828e-04  -1.918 0.055185 .  
## x23          1.788e-04  2.724e-04   0.656 0.511743    
## stat1       -2.208e-04  2.041e-04  -1.082 0.279426    
## stat2       -1.418e-05  2.021e-04  -0.070 0.944059    
## stat3        5.148e-04  2.054e-04   2.506 0.012241 *  
## stat4       -5.554e-04  2.055e-04  -2.703 0.006901 ** 
## stat5       -3.911e-04  2.054e-04  -1.904 0.057006 .  
## stat6       -2.684e-04  2.053e-04  -1.307 0.191252    
## stat7       -2.019e-04  2.042e-04  -0.989 0.322829    
## stat8       -4.700e-05  2.042e-04  -0.230 0.817996    
## stat9       -2.131e-04  2.040e-04  -1.044 0.296318    
## stat10      -1.541e-04  2.041e-04  -0.755 0.450350    
## stat11      -4.591e-04  2.068e-04  -2.220 0.026432 *  
## stat12      -2.021e-05  2.042e-04  -0.099 0.921185    
## stat13      -4.868e-04  2.036e-04  -2.391 0.016841 *  
## stat14      -1.161e-03  2.037e-04  -5.703 1.25e-08 ***
## stat15      -4.320e-04  2.034e-04  -2.123 0.033768 *  
## stat16      -3.146e-04  2.039e-04  -1.543 0.123000    
## stat17      -1.265e-04  2.039e-04  -0.620 0.534997    
## stat18      -1.641e-04  2.024e-04  -0.811 0.417546    
## stat19       7.071e-05  2.035e-04   0.348 0.728210    
## stat20       1.879e-04  2.047e-04   0.918 0.358766    
## stat21       4.736e-05  2.051e-04   0.231 0.817429    
## stat22      -3.510e-04  2.048e-04  -1.714 0.086568 .  
## stat23       5.181e-04  2.034e-04   2.547 0.010888 *  
## stat24      -5.701e-04  2.044e-04  -2.789 0.005309 ** 
## stat25      -5.471e-04  2.037e-04  -2.686 0.007257 ** 
## stat26      -5.974e-04  2.054e-04  -2.909 0.003643 ** 
## stat27       1.029e-04  2.059e-04   0.500 0.617351    
## stat28      -1.093e-04  2.046e-04  -0.534 0.593104    
## stat29       2.579e-05  2.070e-04   0.125 0.900880    
## stat30       2.755e-04  2.063e-04   1.335 0.181792    
## stat31       1.184e-04  2.070e-04   0.572 0.567124    
## stat32       1.410e-05  2.066e-04   0.068 0.945589    
## stat33      -2.826e-04  2.033e-04  -1.390 0.164463    
## stat34       1.435e-04  2.042e-04   0.703 0.482149    
## stat35      -3.894e-04  2.044e-04  -1.905 0.056874 .  
## stat36      -6.682e-05  2.032e-04  -0.329 0.742296    
## stat37      -2.611e-04  2.068e-04  -1.263 0.206649    
## stat38       5.639e-04  2.040e-04   2.764 0.005733 ** 
## stat39      -2.990e-04  2.031e-04  -1.473 0.140909    
## stat40       2.341e-04  2.065e-04   1.134 0.256996    
## stat41      -4.528e-04  2.016e-04  -2.246 0.024746 *  
## stat42      -9.128e-05  2.036e-04  -0.448 0.653905    
## stat43      -3.822e-04  2.060e-04  -1.855 0.063648 .  
## stat44       1.175e-04  2.047e-04   0.574 0.566061    
## stat45      -7.835e-05  2.046e-04  -0.383 0.701791    
## stat46       2.477e-04  2.044e-04   1.212 0.225749    
## stat47       2.994e-04  2.058e-04   1.455 0.145767    
## stat48       3.858e-04  2.046e-04   1.886 0.059382 .  
## stat49       1.107e-04  2.035e-04   0.544 0.586543    
## stat50       2.526e-04  2.029e-04   1.245 0.213239    
## stat51       1.425e-04  2.032e-04   0.701 0.483262    
## stat52      -5.201e-05  2.050e-04  -0.254 0.799680    
## stat53      -2.852e-04  2.063e-04  -1.383 0.166809    
## stat54      -2.823e-04  2.063e-04  -1.369 0.171079    
## stat55      -1.926e-04  2.024e-04  -0.952 0.341376    
## stat56       4.566e-05  2.046e-04   0.223 0.823425    
## stat57      -2.778e-06  2.008e-04  -0.014 0.988962    
## stat58      -5.460e-05  2.028e-04  -0.269 0.787723    
## stat59       2.310e-04  2.039e-04   1.133 0.257384    
## stat60       4.545e-04  2.062e-04   2.204 0.027566 *  
## stat61      -1.828e-04  2.049e-04  -0.892 0.372510    
## stat62      -1.500e-04  2.033e-04  -0.738 0.460730    
## stat63       2.247e-04  2.057e-04   1.092 0.274699    
## stat64       3.406e-04  2.025e-04   1.682 0.092702 .  
## stat65      -1.798e-04  2.066e-04  -0.870 0.384364    
## stat66       2.555e-04  2.065e-04   1.237 0.216046    
## stat67       2.578e-04  2.045e-04   1.261 0.207473    
## stat68      -6.655e-07  2.053e-04  -0.003 0.997413    
## stat69       6.944e-05  2.042e-04   0.340 0.733830    
## stat70       1.611e-04  2.038e-04   0.790 0.429278    
## stat71       7.455e-05  2.042e-04   0.365 0.715116    
## stat72       1.116e-04  2.059e-04   0.542 0.587953    
## stat73       2.757e-04  2.056e-04   1.341 0.180056    
## stat74       7.952e-06  2.047e-04   0.039 0.969019    
## stat75      -1.063e-04  2.062e-04  -0.516 0.606106    
## stat76       1.462e-04  2.048e-04   0.714 0.475345    
## stat77       1.544e-04  2.045e-04   0.755 0.450345    
## stat78      -2.144e-04  2.028e-04  -1.057 0.290554    
## stat79       6.847e-05  2.039e-04   0.336 0.737083    
## stat80       1.096e-04  2.048e-04   0.535 0.592806    
## stat81       2.115e-04  2.047e-04   1.033 0.301577    
## stat82       8.434e-05  2.036e-04   0.414 0.678681    
## stat83      -1.309e-04  2.042e-04  -0.641 0.521508    
## stat84      -1.416e-04  2.046e-04  -0.692 0.488888    
## stat85      -3.848e-04  2.048e-04  -1.879 0.060353 .  
## stat86       5.173e-04  2.042e-04   2.534 0.011317 *  
## stat87      -3.172e-04  2.052e-04  -1.546 0.122270    
## stat88       5.209e-05  2.035e-04   0.256 0.797988    
## stat89       1.480e-05  2.042e-04   0.072 0.942247    
## stat90      -1.928e-04  2.059e-04  -0.936 0.349195    
## stat91      -2.231e-04  2.029e-04  -1.099 0.271641    
## stat92      -3.890e-04  2.057e-04  -1.892 0.058608 .  
## stat93      -1.570e-04  2.079e-04  -0.755 0.450172    
## stat94      -3.292e-05  2.035e-04  -0.162 0.871469    
## stat95       2.973e-04  2.048e-04   1.451 0.146757    
## stat96      -3.758e-04  2.033e-04  -1.849 0.064546 .  
## stat97       3.256e-04  2.034e-04   1.600 0.109553    
## stat98       3.349e-03  2.007e-04  16.682  < 2e-16 ***
## stat99       6.072e-04  2.061e-04   2.946 0.003238 ** 
## stat100      6.492e-04  2.041e-04   3.181 0.001476 ** 
## stat101     -2.239e-04  2.058e-04  -1.088 0.276596    
## stat102     -1.726e-04  2.057e-04  -0.839 0.401366    
## stat103     -4.276e-04  2.060e-04  -2.076 0.037981 *  
## stat104     -1.330e-04  2.044e-04  -0.651 0.515319    
## stat105      3.199e-04  2.030e-04   1.576 0.115081    
## stat106     -2.813e-04  2.051e-04  -1.372 0.170187    
## stat107      9.396e-06  2.044e-04   0.046 0.963343    
## stat108     -1.101e-04  2.049e-04  -0.537 0.591041    
## stat109     -3.320e-04  2.044e-04  -1.625 0.104322    
## stat110     -3.208e-03  2.043e-04 -15.702  < 2e-16 ***
## stat111      1.662e-05  2.038e-04   0.082 0.935019    
## stat112     -2.981e-05  2.066e-04  -0.144 0.885287    
## stat113     -2.983e-04  2.048e-04  -1.456 0.145451    
## stat114      3.173e-04  2.044e-04   1.552 0.120650    
## stat115      3.151e-04  2.061e-04   1.529 0.126301    
## stat116      8.592e-05  2.061e-04   0.417 0.676756    
## stat117      1.927e-04  2.051e-04   0.939 0.347529    
## stat118     -8.010e-05  2.033e-04  -0.394 0.693652    
## stat119      3.017e-04  2.029e-04   1.487 0.137113    
## stat120     -3.655e-05  2.040e-04  -0.179 0.857803    
## stat121     -3.460e-04  2.033e-04  -1.702 0.088848 .  
## stat122     -1.836e-04  2.038e-04  -0.901 0.367767    
## stat123      1.135e-04  2.065e-04   0.549 0.582829    
## stat124     -2.742e-04  2.048e-04  -1.339 0.180707    
## stat125     -2.112e-05  2.054e-04  -0.103 0.918118    
## stat126      3.472e-04  2.032e-04   1.709 0.087504 .  
## stat127      5.135e-05  2.035e-04   0.252 0.800736    
## stat128     -4.307e-04  2.036e-04  -2.115 0.034467 *  
## stat129      1.650e-04  2.035e-04   0.811 0.417586    
## stat130      1.008e-04  2.060e-04   0.489 0.624539    
## stat131      1.689e-04  2.051e-04   0.824 0.410167    
## stat132     -3.814e-05  2.039e-04  -0.187 0.851613    
## stat133      2.685e-04  2.056e-04   1.306 0.191741    
## stat134     -4.245e-05  2.037e-04  -0.208 0.834919    
## stat135     -1.160e-04  2.038e-04  -0.569 0.569159    
## stat136     -5.316e-05  2.055e-04  -0.259 0.795897    
## stat137      3.546e-05  2.033e-04   0.174 0.861544    
## stat138      2.552e-05  2.047e-04   0.125 0.900782    
## stat139     -1.574e-04  2.063e-04  -0.763 0.445703    
## stat140      6.402e-05  2.029e-04   0.315 0.752422    
## stat141      2.758e-04  2.043e-04   1.350 0.177188    
## stat142     -1.972e-05  2.061e-04  -0.096 0.923774    
## stat143      6.673e-05  2.034e-04   0.328 0.742946    
## stat144      4.561e-04  2.035e-04   2.241 0.025076 *  
## stat145      1.693e-06  2.069e-04   0.008 0.993470    
## stat146     -6.706e-04  2.052e-04  -3.269 0.001088 ** 
## stat147     -2.312e-04  2.076e-04  -1.114 0.265319    
## stat148     -1.794e-04  2.033e-04  -0.882 0.377710    
## stat149     -6.403e-04  2.071e-04  -3.092 0.002000 ** 
## stat150     -2.275e-04  2.064e-04  -1.102 0.270504    
## stat151      9.581e-05  2.071e-04   0.463 0.643686    
## stat152      4.089e-06  2.025e-04   0.020 0.983889    
## stat153      9.512e-05  2.072e-04   0.459 0.646134    
## stat154      2.643e-04  2.064e-04   1.280 0.200516    
## stat155      1.356e-05  2.031e-04   0.067 0.946776    
## stat156      5.280e-04  2.061e-04   2.562 0.010428 *  
## stat157     -3.019e-05  2.032e-04  -0.149 0.881892    
## stat158      1.764e-05  2.077e-04   0.085 0.932325    
## stat159      3.689e-04  2.034e-04   1.814 0.069768 .  
## stat160      6.818e-06  2.053e-04   0.033 0.973509    
## stat161      1.101e-04  2.059e-04   0.535 0.592754    
## stat162      3.458e-04  2.021e-04   1.711 0.087152 .  
## stat163      1.274e-04  2.077e-04   0.613 0.539694    
## stat164      9.232e-06  2.064e-04   0.045 0.964325    
## stat165      9.304e-05  2.028e-04   0.459 0.646427    
## stat166     -3.516e-04  2.013e-04  -1.747 0.080726 .  
## stat167     -2.674e-04  2.047e-04  -1.307 0.191401    
## stat168     -9.257e-05  2.044e-04  -0.453 0.650694    
## stat169     -3.094e-05  2.041e-04  -0.152 0.879516    
## stat170     -2.803e-04  2.050e-04  -1.367 0.171597    
## stat171     -1.520e-04  2.066e-04  -0.736 0.461937    
## stat172      5.486e-04  2.038e-04   2.691 0.007142 ** 
## stat173     -3.383e-05  2.067e-04  -0.164 0.869994    
## stat174      1.539e-05  2.047e-04   0.075 0.940051    
## stat175     -3.877e-04  2.043e-04  -1.898 0.057796 .  
## stat176     -1.853e-04  2.043e-04  -0.907 0.364299    
## stat177     -3.583e-04  2.053e-04  -1.745 0.080968 .  
## stat178     -2.287e-05  2.071e-04  -0.110 0.912081    
## stat179      7.544e-05  2.037e-04   0.370 0.711077    
## stat180     -3.126e-04  2.035e-04  -1.536 0.124559    
## stat181      2.435e-04  2.061e-04   1.181 0.237578    
## stat182      8.526e-05  2.053e-04   0.415 0.677943    
## stat183      2.519e-04  2.038e-04   1.236 0.216531    
## stat184      3.513e-04  2.052e-04   1.712 0.087003 .  
## stat185     -3.058e-05  2.030e-04  -0.151 0.880227    
## stat186      3.070e-05  2.070e-04   0.148 0.882113    
## stat187     -4.247e-04  2.037e-04  -2.085 0.037130 *  
## stat188     -2.690e-05  2.036e-04  -0.132 0.894881    
## stat189     -1.724e-04  2.050e-04  -0.841 0.400340    
## stat190     -5.914e-05  2.039e-04  -0.290 0.771774    
## stat191     -1.645e-04  2.055e-04  -0.800 0.423523    
## stat192      8.248e-05  2.068e-04   0.399 0.690031    
## stat193      2.310e-04  2.082e-04   1.110 0.267263    
## stat194     -8.337e-05  2.026e-04  -0.412 0.680687    
## stat195     -3.461e-06  2.047e-04  -0.017 0.986506    
## stat196     -2.093e-04  2.081e-04  -1.006 0.314660    
## stat197      2.663e-05  2.032e-04   0.131 0.895742    
## stat198     -2.215e-04  2.056e-04  -1.078 0.281283    
## stat199      3.352e-04  2.018e-04   1.661 0.096844 .  
## stat200     -3.283e-05  2.034e-04  -0.161 0.871808    
## stat201      6.371e-05  2.050e-04   0.311 0.755991    
## stat202     -4.438e-04  2.069e-04  -2.145 0.032032 *  
## stat203     -3.401e-05  2.038e-04  -0.167 0.867463    
## stat204     -2.247e-04  2.033e-04  -1.105 0.269220    
## stat205      1.332e-04  2.038e-04   0.654 0.513310    
## stat206     -9.722e-05  2.068e-04  -0.470 0.638228    
## stat207      5.865e-04  2.049e-04   2.862 0.004222 ** 
## stat208      1.653e-04  2.056e-04   0.804 0.421426    
## stat209      1.082e-04  2.034e-04   0.532 0.594960    
## stat210     -4.433e-04  2.051e-04  -2.161 0.030730 *  
## stat211     -3.764e-04  2.040e-04  -1.845 0.065129 .  
## stat212      6.992e-05  2.040e-04   0.343 0.731763    
## stat213     -2.753e-04  2.053e-04  -1.341 0.180018    
## stat214     -1.240e-04  2.047e-04  -0.606 0.544853    
## stat215     -8.564e-05  2.059e-04  -0.416 0.677559    
## stat216     -5.266e-05  2.040e-04  -0.258 0.796336    
## stat217      4.939e-04  2.050e-04   2.409 0.016019 *  
## sqrt.x18     2.655e-02  7.793e-04  34.062  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.02521 on 5064 degrees of freedom
## Multiple R-squared:  0.3799, Adjusted R-squared:  0.3505 
## F-statistic: 12.93 on 240 and 5064 DF,  p-value: < 2.2e-16
# Residual/leverage diagnostics for the refit model on the filtered training
# set; the printed output below reports how many points exceed the Cook's D
# thresholds (4/n and 1).
cd.full2 = plot.diagnostics(model.full2, data.train2)

## [1] "Number of data points that have Cook's D > 4/n: 265"
## [1] "Number of data points that have Cook's D > 1: 0"
# Much more normal residuals than before.
# Checking to see if distributions are different and if so which variables.
# High Leverage Plot: compare the target variable between the observations
# flagged by Cook's distance ("High") and the rest ("Normal").
plotData = data.train %>% 
  rownames_to_column() %>%
  # high.cd holds the row names flagged by the Cook's D diagnostics above
  mutate(type = ifelse(rowname %in% high.cd, 'High', 'Normal')) %>%
  dplyr::select(type, target = one_of(label.names))

ggplot(data = plotData, aes(x = type, y = target)) +
  geom_boxplot(fill = 'light blue', outlier.shape = NA) +
  scale_y_continuous(name = "Target Variable Values") +
  theme_light() +
  # (fixed the double space in the original title)
  ggtitle('Distribution of High Leverage Points and Normal Points')

# Rebuild the comparison frame, this time keeping every feature column
# instead of the target.
plotData = data.train %>% 
  rownames_to_column() %>%
  mutate(type = ifelse(rowname %in% high.cd, 'High', 'Normal')) %>%
  dplyr::select(type, one_of(feature.names))

# Two-sample t-tests (pooled variance): for each feature, compare the means
# of the 'High' (flagged) and 'Normal' groups.
feature.cols = dplyr::select(plotData, one_of(feature.names))
comp.test = lapply(feature.cols,
                   function(col) t.test(col ~ plotData$type, var.equal = TRUE))

# Keep only the features whose group means differ significantly (p < 0.05).
sig.comp = list.filter(comp.test, p.value < 0.05)
sapply(sig.comp, function(res) res[['p.value']])
##        stat4       stat11       stat19       stat86       stat98 
## 3.040326e-02 3.419846e-02 6.545155e-03 4.119835e-02 2.379839e-06 
##      stat110      stat149      stat151     sqrt.x18 
## 5.732588e-04 2.613259e-02 2.262718e-02 1.128002e-02
# Distribution (box) plots: one facet per feature, free scales, so the
# High/Normal comparison is visible per variable.
mm = melt(plotData, id = c('type'))

ggplot(mm) +
  geom_boxplot(aes(x = type, y = value)) +
  facet_wrap(~variable, ncol = 16, scales = 'free') +
  ggtitle('Distribution of High Leverage Points and Normal Points')

# Save under a transformation-specific filename, e.g. "comparison_trans2.jpeg".
# paste0() is the idiomatic form of paste(..., sep = "").
title = paste0("comparison_trans", params$trans, '.jpeg')
ggsave(title, width = 100, height = 200, units = 'cm', limitsize = FALSE)

Grand Means Model

# Baseline models on the full and the filtered (Cook's-D-trimmed) training
# sets. NOTE(review): grand.mean.formula is defined earlier in the file —
# presumably an intercept-only formula, given the "Grand Means Model" heading;
# confirm against its definition. These serve as the lower bound of the
# stepwise search scopes below.
model.null = lm(grand.mean.formula, data.train)
model.null2 = lm(grand.mean.formula, data.train2)

Variable Selection

References — Basic: http://www.stat.columbia.edu/~martin/W2024/R10.pdf; Cross Validation + Other Metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/

Forward Selection (w/ full train)

Train

# Forward selection on the full training set: grow from the grand-mean model
# toward the full model by AIC. Guarded by the algo.forward flag (a logical,
# so comparing with `== TRUE` is redundant and error-prone — the same pattern
# produced the `transform.abs == TRUE` no-op earlier in this file).
if (algo.forward) {
  t1 = Sys.time()

  model.forward = step(model.null,
                       scope = list(lower = model.null, upper = model.full),
                       direction = "forward", trace = 0)
  print(summary(model.forward))

  t2 = Sys.time()
  # Report elapsed time with its unit; a bare `t2 - t1` pastes a unitless
  # number whose scale (secs vs mins) depends on the magnitude.
  print(paste0("Time taken for Forward Selection: ", format(difftime(t2, t1))))

  plot.diagnostics(model.forward, data.train)
}

Test

# Evaluate the forward-selected model on the held-out test set.
# algo.forward is a logical flag, so test it directly instead of `== TRUE`.
if (algo.forward) {
  test.model(model.forward, data.test, "Forward Selection")
}

Forward Selection (w/ filtered train)

Train

# Forward selection on the filtered (Cook's-D-trimmed) training set; mirrors
# the full-train run above but uses the "2"-suffixed models/data.
# Direct logical test instead of the redundant `== TRUE` comparison.
if (algo.forward) {
  t1 = Sys.time()

  model.forward2 = step(model.null2,
                        scope = list(lower = model.null2, upper = model.full2),
                        direction = "forward", trace = 0)
  print(summary(model.forward2))

  t2 = Sys.time()
  # Include the difftime unit in the message (bare `t2 - t1` is unitless).
  print(paste0("Time taken for Forward Selection: ", format(difftime(t2, t1))))

  plot.diagnostics(model.forward2, data.train2)
}

Test

# Evaluate the filtered-train forward-selection model on the test set.
# Direct logical test instead of the redundant `== TRUE` comparison.
if (algo.forward) {
  test.model(model.forward2, data.test, "Forward Selection (2)")
}

Forward Selection with CV (w/ full train)

Train

# Forward selection via caret's leapForward with cross-validation over nvmax.
# Direct logical test instead of the redundant `== TRUE` comparison.
if (algo.forward.caret) {
  set.seed(1)  # reproducible CV folds
  returned = train.caret.glmselect(formula = formula,
                                   data = data.train,
                                   method = "leapForward",
                                   feature.names = feature.names)
  # NOTE(review): reuses the name model.forward from the step()-based section
  # above; the caret fit replaces it from here on.
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 12 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.03426821 0.1094342 0.02668433 0.0007671751 0.01830599
## 2       2 0.03341813 0.1535728 0.02594003 0.0008198294 0.02639043
## 3       3 0.03287257 0.1816561 0.02541294 0.0008503685 0.03153037
## 4       4 0.03239628 0.2054364 0.02473878 0.0008624627 0.03152274
## 5       5 0.03201215 0.2239753 0.02445310 0.0008662346 0.02725898
## 6       6 0.03191524 0.2285619 0.02438627 0.0008360375 0.02470748
## 7       7 0.03191420 0.2285037 0.02442852 0.0007968190 0.02338661
## 8       8 0.03185561 0.2313736 0.02438606 0.0007411257 0.02370581
## 9       9 0.03183263 0.2324701 0.02435534 0.0007863111 0.02567275
## 10     10 0.03180391 0.2338324 0.02433061 0.0007389905 0.02451299
## 11     11 0.03180100 0.2340895 0.02433364 0.0007557523 0.02594161
## 12     12 0.03177341 0.2353893 0.02431505 0.0007382138 0.02432139
## 13     13 0.03177927 0.2351046 0.02431925 0.0007220642 0.02438588
## 14     14 0.03178458 0.2348190 0.02433793 0.0006950445 0.02268287
## 15     15 0.03180628 0.2337481 0.02435280 0.0006775356 0.02121300
## 16     16 0.03182834 0.2327028 0.02437532 0.0006658929 0.02087200
## 17     17 0.03183880 0.2322673 0.02438065 0.0006477234 0.02002256
## 18     18 0.03185728 0.2313949 0.02439923 0.0006370540 0.01992706
## 19     19 0.03185094 0.2317484 0.02441870 0.0006584165 0.02053793
## 20     20 0.03186668 0.2310556 0.02442846 0.0006507716 0.01986592
## 21     21 0.03188181 0.2303584 0.02445138 0.0006555873 0.01951880
## 22     22 0.03190905 0.2290782 0.02446255 0.0006596734 0.01845077
## 23     23 0.03192581 0.2283415 0.02446578 0.0006778476 0.01924435
## 24     24 0.03193685 0.2278706 0.02447833 0.0006865324 0.01919079
## 25     25 0.03195930 0.2268606 0.02450397 0.0006918363 0.01984599
## 26     26 0.03195229 0.2272485 0.02449240 0.0006995614 0.01994106
## 27     27 0.03193463 0.2280680 0.02448302 0.0006941963 0.02006089
## 28     28 0.03191824 0.2289272 0.02447601 0.0006762115 0.02059623
## 29     29 0.03190792 0.2294304 0.02447571 0.0006639699 0.02025941
## 30     30 0.03191222 0.2292236 0.02448280 0.0006510563 0.02048205
## 31     31 0.03195409 0.2273411 0.02452196 0.0006429012 0.02056087
## 32     32 0.03192395 0.2287506 0.02449213 0.0006325914 0.02046504
## 33     33 0.03193047 0.2284937 0.02449999 0.0006273371 0.02123606
## 34     34 0.03192327 0.2288010 0.02449162 0.0006307530 0.02087625
## 35     35 0.03192522 0.2287296 0.02448484 0.0006302308 0.01993545
## 36     36 0.03193096 0.2285034 0.02449350 0.0006089271 0.02022881
## 37     37 0.03194085 0.2280825 0.02450151 0.0006087105 0.02031436
## 38     38 0.03193008 0.2285704 0.02448704 0.0006193518 0.01958586
## 39     39 0.03194577 0.2279179 0.02448892 0.0006081918 0.02020100
## 40     40 0.03196852 0.2269090 0.02449195 0.0006205796 0.01962540
## 41     41 0.03196665 0.2269924 0.02449817 0.0006187376 0.01924867
## 42     42 0.03197269 0.2267516 0.02449873 0.0006227296 0.01984074
## 43     43 0.03198738 0.2260724 0.02451125 0.0006093109 0.01961869
## 44     44 0.03199881 0.2255630 0.02451572 0.0006080875 0.01921847
## 45     45 0.03201167 0.2249822 0.02452857 0.0006271203 0.01897249
## 46     46 0.03201069 0.2250573 0.02453020 0.0006324289 0.01878601
## 47     47 0.03200373 0.2254276 0.02452780 0.0006304715 0.01826924
## 48     48 0.03200624 0.2253907 0.02453001 0.0006280381 0.01851457
## 49     49 0.03202597 0.2244602 0.02455595 0.0006316582 0.01812999
## 50     50 0.03205126 0.2233576 0.02457157 0.0006228733 0.01756317
## 51     51 0.03206149 0.2229722 0.02457151 0.0006158132 0.01781484
## 52     52 0.03207203 0.2225430 0.02458470 0.0006264715 0.01800897
## 53     53 0.03208384 0.2220078 0.02460172 0.0006389290 0.01799717
## 54     54 0.03209719 0.2213894 0.02461002 0.0006417562 0.01793491
## 55     55 0.03209292 0.2216171 0.02460930 0.0006421297 0.01810897
## 56     56 0.03210193 0.2212011 0.02461128 0.0006347372 0.01827287
## 57     57 0.03209269 0.2216856 0.02459895 0.0006208944 0.01818983
## 58     58 0.03209609 0.2215694 0.02460122 0.0006295605 0.01848164
## 59     59 0.03210467 0.2212312 0.02461121 0.0006233534 0.01861395
## 60     60 0.03211723 0.2206950 0.02462216 0.0006248028 0.01859949
## 61     61 0.03212781 0.2202489 0.02462611 0.0006245218 0.01890809
## 62     62 0.03213398 0.2200049 0.02462934 0.0006249900 0.01886586
## 63     63 0.03215298 0.2191492 0.02464110 0.0006194438 0.01834889
## 64     64 0.03215980 0.2188043 0.02464484 0.0006151878 0.01833291
## 65     65 0.03215268 0.2191394 0.02464373 0.0006295967 0.01816953
## 66     66 0.03216771 0.2184917 0.02466566 0.0006420285 0.01836679
## 67     67 0.03217665 0.2181480 0.02467085 0.0006347755 0.01817580
## 68     68 0.03218225 0.2179629 0.02467849 0.0006442916 0.01858966
## 69     69 0.03217409 0.2183762 0.02467501 0.0006484274 0.01879120
## 70     70 0.03217151 0.2184866 0.02468093 0.0006436357 0.01829605
## 71     71 0.03217393 0.2184388 0.02468077 0.0006427798 0.01837177
## 72     72 0.03218448 0.2180175 0.02469947 0.0006551463 0.01897174
## 73     73 0.03220101 0.2173174 0.02470637 0.0006523930 0.01847221
## 74     74 0.03220592 0.2170922 0.02470196 0.0006463080 0.01775529
## 75     75 0.03220720 0.2170640 0.02469980 0.0006501428 0.01773581
## 76     76 0.03220265 0.2172768 0.02469786 0.0006442777 0.01761793
## 77     77 0.03220860 0.2170419 0.02470216 0.0006386177 0.01734120
## 78     78 0.03220835 0.2171208 0.02470056 0.0006298444 0.01734084
## 79     79 0.03221731 0.2167608 0.02471252 0.0006337298 0.01725232
## 80     80 0.03222825 0.2162655 0.02471721 0.0006296764 0.01680812
## 81     81 0.03223150 0.2161234 0.02472546 0.0006411103 0.01726271
## 82     82 0.03224477 0.2155970 0.02473160 0.0006421587 0.01751684
## 83     83 0.03223945 0.2158325 0.02472726 0.0006403415 0.01746363
## 84     84 0.03224685 0.2155050 0.02473293 0.0006364063 0.01734418
## 85     85 0.03224417 0.2156034 0.02472784 0.0006390850 0.01734229
## 86     86 0.03224923 0.2154398 0.02473130 0.0006323636 0.01751333
## 87     87 0.03224430 0.2156634 0.02472860 0.0006228449 0.01711677
## 88     88 0.03223774 0.2159681 0.02472657 0.0006177799 0.01697616
## 89     89 0.03223819 0.2159532 0.02472321 0.0006190769 0.01675709
## 90     90 0.03223951 0.2159324 0.02472461 0.0006255624 0.01670042
## 91     91 0.03224406 0.2157359 0.02472841 0.0006318778 0.01646294
## 92     92 0.03224305 0.2157928 0.02473292 0.0006322961 0.01634367
## 93     93 0.03224365 0.2158029 0.02473659 0.0006325796 0.01660013
## 94     94 0.03224906 0.2156090 0.02474270 0.0006386478 0.01670475
## 95     95 0.03224913 0.2155885 0.02474691 0.0006438709 0.01651676
## 96     96 0.03225813 0.2152454 0.02475323 0.0006467038 0.01687423
## 97     97 0.03225834 0.2152502 0.02474765 0.0006472903 0.01677075
## 98     98 0.03225463 0.2154297 0.02473930 0.0006454656 0.01640629
## 99     99 0.03225717 0.2153053 0.02474360 0.0006466440 0.01605013
## 100   100 0.03226659 0.2149000 0.02475087 0.0006494355 0.01611305
## 101   101 0.03227159 0.2147149 0.02475184 0.0006419314 0.01602198
## 102   102 0.03226993 0.2147805 0.02474568 0.0006483344 0.01599985
## 103   103 0.03227292 0.2146767 0.02474884 0.0006402580 0.01581695
## 104   104 0.03227658 0.2145037 0.02475449 0.0006361294 0.01600140
## 105   105 0.03227255 0.2147153 0.02475584 0.0006329878 0.01628442
## 106   106 0.03227041 0.2148038 0.02475442 0.0006357287 0.01616218
## 107   107 0.03227415 0.2146590 0.02476042 0.0006318564 0.01625829
## 108   108 0.03227798 0.2145237 0.02476429 0.0006335285 0.01610167
## 109   109 0.03227709 0.2145987 0.02475491 0.0006329587 0.01638922
## 110   110 0.03227405 0.2147702 0.02475209 0.0006452822 0.01678020
## 111   111 0.03227282 0.2148331 0.02475284 0.0006428571 0.01648768
## 112   112 0.03226933 0.2149889 0.02475049 0.0006475444 0.01669529
## 113   113 0.03226950 0.2149786 0.02474993 0.0006525507 0.01657365
## 114   114 0.03227891 0.2145426 0.02475797 0.0006551655 0.01654697
## 115   115 0.03228021 0.2145169 0.02476054 0.0006608860 0.01660636
## 116   116 0.03227690 0.2146894 0.02475729 0.0006646584 0.01683001
## 117   117 0.03228168 0.2144804 0.02475921 0.0006649156 0.01703126
## 118   118 0.03229359 0.2140007 0.02477028 0.0006686185 0.01711490
## 119   119 0.03228874 0.2142212 0.02477214 0.0006728864 0.01750192
## 120   120 0.03229088 0.2141434 0.02476974 0.0006674165 0.01746285
## 121   121 0.03228198 0.2145290 0.02475857 0.0006641923 0.01759512
## 122   122 0.03228269 0.2145181 0.02475987 0.0006582348 0.01761936
## 123   123 0.03228633 0.2143887 0.02476340 0.0006670272 0.01797569
## 124   124 0.03228793 0.2143407 0.02476486 0.0006659908 0.01797167
## 125   125 0.03228712 0.2143856 0.02476388 0.0006747819 0.01814827
## 126   126 0.03229135 0.2141801 0.02477349 0.0006785692 0.01799204
## 127   127 0.03230144 0.2137478 0.02477738 0.0006844999 0.01799469
## 128   128 0.03230364 0.2136557 0.02477921 0.0006808398 0.01772381
## 129   129 0.03230655 0.2135118 0.02477511 0.0006745562 0.01761544
## 130   130 0.03230799 0.2134492 0.02477385 0.0006735705 0.01746046
## 131   131 0.03231584 0.2131339 0.02478280 0.0006718777 0.01757392
## 132   132 0.03231831 0.2130154 0.02478454 0.0006697196 0.01774060
## 133   133 0.03232054 0.2129228 0.02478122 0.0006722759 0.01766826
## 134   134 0.03232381 0.2127702 0.02478217 0.0006814165 0.01775740
## 135   135 0.03232278 0.2128476 0.02478310 0.0006772387 0.01778497
## 136   136 0.03232385 0.2128166 0.02478820 0.0006757226 0.01775764
## 137   137 0.03232151 0.2129542 0.02478472 0.0006774185 0.01787412
## 138   138 0.03232367 0.2128562 0.02478298 0.0006703701 0.01759968
## 139   139 0.03232744 0.2127274 0.02478718 0.0006718483 0.01792421
## 140   140 0.03233150 0.2125473 0.02479179 0.0006685632 0.01777348
## 141   141 0.03233163 0.2125417 0.02479117 0.0006687713 0.01784921
## 142   142 0.03233493 0.2124246 0.02479499 0.0006647870 0.01788960
## 143   143 0.03233677 0.2123390 0.02479594 0.0006646799 0.01795908
## 144   144 0.03233579 0.2123899 0.02479402 0.0006676344 0.01815614
## 145   145 0.03233723 0.2123396 0.02479531 0.0006656424 0.01801995
## 146   146 0.03234453 0.2120361 0.02480153 0.0006634492 0.01791735
## 147   147 0.03234376 0.2120883 0.02480164 0.0006571875 0.01792476
## 148   148 0.03234897 0.2118721 0.02480990 0.0006608017 0.01798967
## 149   149 0.03235283 0.2117083 0.02481270 0.0006586305 0.01814311
## 150   150 0.03235669 0.2115298 0.02481391 0.0006602544 0.01809824
## 151   151 0.03236129 0.2113324 0.02481804 0.0006545482 0.01802685
## 152   152 0.03236398 0.2112066 0.02481873 0.0006534630 0.01782330
## 153   153 0.03235950 0.2114072 0.02481602 0.0006472084 0.01772101
## 154   154 0.03236417 0.2112322 0.02481968 0.0006439264 0.01773443
## 155   155 0.03236477 0.2112258 0.02481963 0.0006448558 0.01762818
## 156   156 0.03236501 0.2112139 0.02481908 0.0006454403 0.01727987
## 157   157 0.03236388 0.2112781 0.02481677 0.0006433463 0.01715678
## 158   158 0.03236604 0.2111649 0.02481603 0.0006443607 0.01719806
## 159   159 0.03236446 0.2112209 0.02481391 0.0006388226 0.01695090
## 160   160 0.03236134 0.2113546 0.02481153 0.0006401905 0.01715662
## 161   161 0.03235990 0.2114215 0.02481090 0.0006386619 0.01701290
## 162   162 0.03235908 0.2114664 0.02481089 0.0006404744 0.01711402
## 163   163 0.03236064 0.2113986 0.02481022 0.0006403783 0.01700835
## 164   164 0.03236376 0.2112757 0.02481469 0.0006416661 0.01719939
## 165   165 0.03236292 0.2113317 0.02481131 0.0006335127 0.01720271
## 166   166 0.03236521 0.2112333 0.02481286 0.0006339726 0.01725137
## 167   167 0.03236662 0.2111607 0.02481641 0.0006352569 0.01725372
## 168   168 0.03236670 0.2111507 0.02481660 0.0006351916 0.01715201
## 169   169 0.03237049 0.2109936 0.02482058 0.0006333564 0.01717073
## 170   170 0.03236967 0.2110375 0.02482210 0.0006302471 0.01702591
## 171   171 0.03237189 0.2109464 0.02481995 0.0006337130 0.01713502
## 172   172 0.03237175 0.2109598 0.02481885 0.0006318617 0.01716000
## 173   173 0.03237175 0.2109666 0.02481820 0.0006323646 0.01712850
## 174   174 0.03237559 0.2107978 0.02482029 0.0006326830 0.01711150
## 175   175 0.03237711 0.2107333 0.02481982 0.0006322173 0.01727578
## 176   176 0.03237953 0.2106316 0.02482135 0.0006392954 0.01736468
## 177   177 0.03237747 0.2107236 0.02481819 0.0006385685 0.01737274
## 178   178 0.03237223 0.2109475 0.02481421 0.0006434618 0.01749505
## 179   179 0.03236982 0.2110484 0.02481207 0.0006473410 0.01757324
## 180   180 0.03237185 0.2109549 0.02481348 0.0006473768 0.01746645
## 181   181 0.03237371 0.2108729 0.02481463 0.0006468049 0.01737371
## 182   182 0.03237232 0.2109461 0.02481708 0.0006474986 0.01741364
## 183   183 0.03237082 0.2110170 0.02481548 0.0006495061 0.01758008
## 184   184 0.03237034 0.2110277 0.02481466 0.0006495325 0.01747987
## 185   185 0.03237032 0.2110391 0.02481405 0.0006458812 0.01743651
## 186   186 0.03237142 0.2110136 0.02481467 0.0006467892 0.01755864
## 187   187 0.03236926 0.2111117 0.02481192 0.0006428598 0.01750422
## 188   188 0.03236611 0.2112479 0.02480845 0.0006437613 0.01757012
## 189   189 0.03236603 0.2112418 0.02480842 0.0006418901 0.01740982
## 190   190 0.03236454 0.2113082 0.02480704 0.0006439704 0.01740493
## 191   191 0.03236711 0.2112022 0.02480954 0.0006450002 0.01741959
## 192   192 0.03236698 0.2112050 0.02480889 0.0006421717 0.01731728
## 193   193 0.03236698 0.2112192 0.02480998 0.0006384945 0.01731983
## 194   194 0.03236981 0.2110987 0.02481343 0.0006374238 0.01726627
## 195   195 0.03237075 0.2110552 0.02481483 0.0006378702 0.01726517
## 196   196 0.03237025 0.2110769 0.02481298 0.0006365499 0.01716865
## 197   197 0.03237159 0.2110195 0.02481482 0.0006376230 0.01720170
## 198   198 0.03237488 0.2108702 0.02481745 0.0006372231 0.01717725
## 199   199 0.03237231 0.2109842 0.02481450 0.0006378298 0.01705938
## 200   200 0.03237361 0.2109294 0.02481468 0.0006374305 0.01700977
## 201   201 0.03237529 0.2108556 0.02481545 0.0006383109 0.01701602
## 202   202 0.03237609 0.2108178 0.02481570 0.0006374860 0.01700760
## 203   203 0.03237886 0.2107060 0.02481771 0.0006335500 0.01694439
## 204   204 0.03237992 0.2106613 0.02481818 0.0006322319 0.01687075
## 205   205 0.03238100 0.2106208 0.02481862 0.0006333379 0.01688164
## 206   206 0.03237998 0.2106600 0.02481831 0.0006306450 0.01684523
## 207   207 0.03238022 0.2106458 0.02481808 0.0006304548 0.01682094
## 208   208 0.03238176 0.2105763 0.02481748 0.0006294251 0.01673578
## 209   209 0.03238406 0.2104716 0.02481876 0.0006289370 0.01669033
## 210   210 0.03238518 0.2104278 0.02482013 0.0006283227 0.01672104
## 211   211 0.03238582 0.2104140 0.02482122 0.0006277827 0.01674221
## 212   212 0.03238554 0.2104239 0.02482172 0.0006280513 0.01674109
## 213   213 0.03238479 0.2104645 0.02482073 0.0006279515 0.01686254
## 214   214 0.03238646 0.2103951 0.02482216 0.0006281572 0.01692305
## 215   215 0.03238514 0.2104512 0.02482065 0.0006274765 0.01689101
## 216   216 0.03238457 0.2104765 0.02482086 0.0006276832 0.01689665
## 217   217 0.03238495 0.2104596 0.02482086 0.0006284798 0.01690350
## 218   218 0.03238404 0.2104967 0.02482088 0.0006274708 0.01687614
## 219   219 0.03238375 0.2105079 0.02482010 0.0006266264 0.01689077
## 220   220 0.03238340 0.2105195 0.02482016 0.0006262486 0.01689334
## 221   221 0.03238413 0.2104852 0.02482106 0.0006259553 0.01692561
## 222   222 0.03238403 0.2104951 0.02482160 0.0006267009 0.01694315
## 223   223 0.03238481 0.2104616 0.02482213 0.0006290973 0.01698556
## 224   224 0.03238474 0.2104625 0.02482211 0.0006296008 0.01699212
## 225   225 0.03238457 0.2104718 0.02482250 0.0006297176 0.01699121
## 226   226 0.03238509 0.2104504 0.02482281 0.0006300274 0.01699901
## 227   227 0.03238365 0.2105133 0.02482164 0.0006296393 0.01701502
## 228   228 0.03238433 0.2104831 0.02482198 0.0006306961 0.01704026
## 229   229 0.03238361 0.2105155 0.02482148 0.0006313660 0.01708232
## 230   230 0.03238380 0.2105102 0.02482165 0.0006315468 0.01710672
## 231   231 0.03238317 0.2105367 0.02482155 0.0006313958 0.01711003
## 232   232 0.03238338 0.2105303 0.02482165 0.0006311156 0.01712688
## 233   233 0.03238360 0.2105224 0.02482189 0.0006309780 0.01712101
## 234   234 0.03238380 0.2105145 0.02482210 0.0006309593 0.01712433
## 235   235 0.03238392 0.2105097 0.02482210 0.0006306562 0.01712586
## 236   236 0.03238387 0.2105111 0.02482191 0.0006310423 0.01713022
## 237   237 0.03238369 0.2105198 0.02482176 0.0006305821 0.01712627
## 238   238 0.03238353 0.2105262 0.02482167 0.0006304126 0.01712057
## 239   239 0.03238357 0.2105246 0.02482160 0.0006303685 0.01712324
## 240   240 0.03238358 0.2105244 0.02482162 0.0006304950 0.01712347
##            MAESD
## 1   0.0005035442
## 2   0.0005078511
## 3   0.0006889891
## 4   0.0007548738
## 5   0.0007385957
## 6   0.0006920028
## 7   0.0006664314
## 8   0.0006186643
## 9   0.0006469633
## 10  0.0005912182
## 11  0.0006289068
## 12  0.0006182943
## 13  0.0006177591
## 14  0.0006042341
## 15  0.0005919262
## 16  0.0005803183
## 17  0.0005660014
## 18  0.0005649196
## 19  0.0005795214
## 20  0.0005748058
## 21  0.0005584302
## 22  0.0005755803
## 23  0.0005833940
## 24  0.0005788928
## 25  0.0005842247
## 26  0.0005819341
## 27  0.0005785205
## 28  0.0005786257
## 29  0.0005757291
## 30  0.0005727497
## 31  0.0005594809
## 32  0.0005516421
## 33  0.0005377413
## 34  0.0005314867
## 35  0.0005303773
## 36  0.0005273627
## 37  0.0005109281
## 38  0.0005250179
## 39  0.0005390701
## 40  0.0005435174
## 41  0.0005276359
## 42  0.0005337206
## 43  0.0005104831
## 44  0.0005039814
## 45  0.0005035164
## 46  0.0005124852
## 47  0.0005081963
## 48  0.0005265689
## 49  0.0005251518
## 50  0.0005109413
## 51  0.0005119585
## 52  0.0005094290
## 53  0.0005078608
## 54  0.0005007868
## 55  0.0005106040
## 56  0.0005089415
## 57  0.0004881742
## 58  0.0004896468
## 59  0.0004874765
## 60  0.0004867578
## 61  0.0004979821
## 62  0.0005021746
## 63  0.0004977222
## 64  0.0004876747
## 65  0.0004915189
## 66  0.0005051581
## 67  0.0004931356
## 68  0.0005084522
## 69  0.0005068789
## 70  0.0004956522
## 71  0.0004923096
## 72  0.0005083383
## 73  0.0005078321
## 74  0.0005056468
## 75  0.0005094471
## 76  0.0005115899
## 77  0.0004970273
## 78  0.0005070696
## 79  0.0005059329
## 80  0.0004930048
## 81  0.0004917971
## 82  0.0005061771
## 83  0.0005054945
## 84  0.0004998241
## 85  0.0005043749
## 86  0.0004881650
## 87  0.0004792757
## 88  0.0004751849
## 89  0.0004695483
## 90  0.0004752359
## 91  0.0004828028
## 92  0.0004923084
## 93  0.0004991760
## 94  0.0005082365
## 95  0.0005115511
## 96  0.0005244380
## 97  0.0005330301
## 98  0.0005315005
## 99  0.0005240982
## 100 0.0005231891
## 101 0.0005156459
## 102 0.0005245803
## 103 0.0005259593
## 104 0.0005266536
## 105 0.0005243956
## 106 0.0005225453
## 107 0.0005264776
## 108 0.0005253699
## 109 0.0005239626
## 110 0.0005249740
## 111 0.0005281292
## 112 0.0005313234
## 113 0.0005220404
## 114 0.0005305363
## 115 0.0005329176
## 116 0.0005323780
## 117 0.0005275143
## 118 0.0005310823
## 119 0.0005270737
## 120 0.0005254264
## 121 0.0005247435
## 122 0.0005206612
## 123 0.0005159590
## 124 0.0005198000
## 125 0.0005186453
## 126 0.0005209535
## 127 0.0005237470
## 128 0.0005207128
## 129 0.0005151591
## 130 0.0005204995
## 131 0.0005187232
## 132 0.0005217210
## 133 0.0005225089
## 134 0.0005261052
## 135 0.0005252619
## 136 0.0005230119
## 137 0.0005148332
## 138 0.0005089392
## 139 0.0005103563
## 140 0.0005056243
## 141 0.0005108661
## 142 0.0005089260
## 143 0.0005124209
## 144 0.0005190744
## 145 0.0005189727
## 146 0.0005203869
## 147 0.0005162867
## 148 0.0005141766
## 149 0.0005150121
## 150 0.0005106060
## 151 0.0005085379
## 152 0.0005057090
## 153 0.0005035278
## 154 0.0004997336
## 155 0.0004944081
## 156 0.0004938861
## 157 0.0004862176
## 158 0.0004853629
## 159 0.0004840322
## 160 0.0004873739
## 161 0.0004845071
## 162 0.0004908851
## 163 0.0004911635
## 164 0.0004958367
## 165 0.0004866900
## 166 0.0004835603
## 167 0.0004812577
## 168 0.0004781780
## 169 0.0004801859
## 170 0.0004778176
## 171 0.0004791438
## 172 0.0004798871
## 173 0.0004802100
## 174 0.0004762915
## 175 0.0004755033
## 176 0.0004818772
## 177 0.0004836964
## 178 0.0004879063
## 179 0.0004923688
## 180 0.0004943780
## 181 0.0004940018
## 182 0.0004960918
## 183 0.0004982899
## 184 0.0004972837
## 185 0.0004966243
## 186 0.0004969049
## 187 0.0004978187
## 188 0.0004961570
## 189 0.0004919038
## 190 0.0004924967
## 191 0.0004927377
## 192 0.0004939316
## 193 0.0004917803
## 194 0.0004914580
## 195 0.0004925137
## 196 0.0004904589
## 197 0.0004911009
## 198 0.0004902551
## 199 0.0004902342
## 200 0.0004897564
## 201 0.0004902363
## 202 0.0004923342
## 203 0.0004883890
## 204 0.0004879717
## 205 0.0004875551
## 206 0.0004855396
## 207 0.0004857216
## 208 0.0004857038
## 209 0.0004849272
## 210 0.0004834970
## 211 0.0004840545
## 212 0.0004858542
## 213 0.0004859223
## 214 0.0004853990
## 215 0.0004853695
## 216 0.0004854445
## 217 0.0004849944
## 218 0.0004840414
## 219 0.0004843740
## 220 0.0004845801
## 221 0.0004840746
## 222 0.0004844861
## 223 0.0004856146
## 224 0.0004856397
## 225 0.0004852937
## 226 0.0004857069
## 227 0.0004855894
## 228 0.0004858120
## 229 0.0004872522
## 230 0.0004869581
## 231 0.0004874034
## 232 0.0004878834
## 233 0.0004875616
## 234 0.0004873691
## 235 0.0004872939
## 236 0.0004875048
## 237 0.0004874131
## 238 0.0004874002
## 239 0.0004873677
## 240 0.0004875010
##    nvmax
## 12    12

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

##                  Estimate         2.5 %        97.5 %
## (Intercept)  1.9914336190  1.9842730715  1.9985941665
## x4          -0.0000526423 -0.0000700820 -0.0000352026
## x7           0.0111862837  0.0099543637  0.0124182036
## x8           0.0004690108  0.0001826029  0.0007554187
## x9           0.0038170729  0.0031744774  0.0044596685
## x10          0.0009995995  0.0004009321  0.0015982668
## x16          0.0009666310  0.0005522227  0.0013810393
## x17          0.0014834664  0.0008548562  0.0021120766
## stat14      -0.0009951099 -0.0014713783 -0.0005188415
## stat98       0.0034421836  0.0029701194  0.0039142477
## stat110     -0.0032377891 -0.0037177908 -0.0027577874
## stat149     -0.0008689431 -0.0013557374 -0.0003821488
## sqrt.x18     0.0267030082  0.0248675733  0.0285384431

Test

# Evaluate the CV forward-selection model on the test set.
# Direct logical test instead of the redundant `== TRUE` comparison.
if (algo.forward.caret) {
  test.model(model.forward, data.test,
             method = 'leapForward', subopt = NULL,
             formula = formula, feature.names = feature.names,
             label.names = label.names,
             id = id,
             # NOTE(review): `t` resolves to base R's transpose function unless
             # a transformation object named t was assigned earlier in the file
             # — confirm this is not meant to be params$trans.
             draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.045   2.084   2.097   2.097   2.109   2.143 
## [1] "leapForward  Test MSE: 0.00097648551851115"

Forward Selection with CV (w/ filtered train)

Train

# CV forward selection (leapForward) again, this time on the filtered
# training set. Direct logical test instead of `== TRUE`.
if (algo.forward.caret) {
  set.seed(1)  # reproducible CV folds
  returned = train.caret.glmselect(formula = formula,
                                   data = data.train2,
                                   method = "leapForward",
                                   feature.names = feature.names)
  # NOTE(review): this clobbers `model.forward`/`id` from the full-train run
  # above; the non-caret sections use a "2" suffix (model.forward2) for the
  # filtered fit — confirm the overwrite is intentional before relying on
  # model.forward downstream.
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 37 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD  RsquaredSD
## 1       1 0.02889846 0.1473540 0.02339940 0.0007134151 0.009987013
## 2       2 0.02771803 0.2165405 0.02253170 0.0006944449 0.025945344
## 3       3 0.02710862 0.2507693 0.02197082 0.0005989025 0.028698566
## 4       4 0.02653713 0.2821401 0.02128824 0.0006615569 0.034159341
## 5       5 0.02608466 0.3059575 0.02097091 0.0006508910 0.030710048
## 6       6 0.02593409 0.3137841 0.02087253 0.0005915676 0.028883207
## 7       7 0.02592217 0.3143957 0.02090517 0.0006115201 0.031180139
## 8       8 0.02582133 0.3199456 0.02085804 0.0005953075 0.031445137
## 9       9 0.02576415 0.3228265 0.02082125 0.0005997096 0.032488117
## 10     10 0.02568801 0.3266456 0.02078380 0.0006045909 0.033584333
## 11     11 0.02566619 0.3278194 0.02077293 0.0005668776 0.031962892
## 12     12 0.02566777 0.3276937 0.02078077 0.0005525641 0.030649412
## 13     13 0.02569466 0.3263611 0.02080091 0.0005346902 0.032184814
## 14     14 0.02571543 0.3253304 0.02082811 0.0005288364 0.031641157
## 15     15 0.02572808 0.3247118 0.02085077 0.0005290211 0.031394546
## 16     16 0.02575400 0.3233946 0.02086876 0.0004990719 0.029484047
## 17     17 0.02577393 0.3224341 0.02089030 0.0004882110 0.029680113
## 18     18 0.02578255 0.3220217 0.02090062 0.0004912496 0.030506581
## 19     19 0.02579240 0.3215967 0.02091523 0.0004845376 0.030909017
## 20     20 0.02576944 0.3228066 0.02090399 0.0004634363 0.031446896
## 21     21 0.02573621 0.3245750 0.02088965 0.0004512859 0.032014901
## 22     22 0.02572635 0.3251851 0.02087045 0.0004337188 0.031605190
## 23     23 0.02571958 0.3255628 0.02087156 0.0004375977 0.030970140
## 24     24 0.02569768 0.3266238 0.02086027 0.0004255377 0.030285485
## 25     25 0.02569040 0.3270545 0.02084714 0.0004376583 0.030552104
## 26     26 0.02568583 0.3274160 0.02083581 0.0004336420 0.031858663
## 27     27 0.02569668 0.3268547 0.02084428 0.0004271602 0.032030015
## 28     28 0.02569672 0.3268663 0.02086760 0.0004219292 0.032516567
## 29     29 0.02568884 0.3272641 0.02085059 0.0004237113 0.031989197
## 30     30 0.02566505 0.3285542 0.02082348 0.0003848477 0.031356378
## 31     31 0.02564872 0.3294689 0.02082266 0.0003889204 0.032122923
## 32     32 0.02563377 0.3302587 0.02081153 0.0004046217 0.032001352
## 33     33 0.02563086 0.3303557 0.02080532 0.0004308875 0.031229399
## 34     34 0.02561890 0.3309277 0.02079301 0.0004231271 0.030260297
## 35     35 0.02562779 0.3305715 0.02080576 0.0004087837 0.029552489
## 36     36 0.02563731 0.3301156 0.02081306 0.0004258703 0.029654896
## 37     37 0.02560595 0.3316195 0.02078575 0.0004304441 0.029061331
## 38     38 0.02562262 0.3308117 0.02079966 0.0004391735 0.029703678
## 39     39 0.02564804 0.3296016 0.02082066 0.0004213670 0.029596622
## 40     40 0.02564160 0.3299854 0.02081466 0.0004252076 0.029799638
## 41     41 0.02565929 0.3291167 0.02083322 0.0004150840 0.029591476
## 42     42 0.02567372 0.3283605 0.02084554 0.0004223149 0.029383237
## 43     43 0.02568626 0.3276804 0.02085486 0.0004241440 0.029338597
## 44     44 0.02568122 0.3279255 0.02085489 0.0004435497 0.029646303
## 45     45 0.02569831 0.3271317 0.02086375 0.0004482350 0.029733441
## 46     46 0.02571254 0.3263794 0.02087637 0.0004366591 0.029457848
## 47     47 0.02571625 0.3262144 0.02088689 0.0004236902 0.029886157
## 48     48 0.02573478 0.3253104 0.02089320 0.0004199468 0.029793005
## 49     49 0.02574554 0.3248137 0.02091064 0.0004080779 0.029678483
## 50     50 0.02573387 0.3254383 0.02089433 0.0004062457 0.029999463
## 51     51 0.02574938 0.3246516 0.02091164 0.0004182721 0.029785135
## 52     52 0.02575252 0.3245230 0.02091117 0.0004136121 0.029167957
## 53     53 0.02574345 0.3249338 0.02090767 0.0004037086 0.029081585
## 54     54 0.02575271 0.3244439 0.02091205 0.0004019305 0.029291629
## 55     55 0.02576450 0.3238839 0.02091798 0.0003963987 0.029473425
## 56     56 0.02576557 0.3238972 0.02092414 0.0003927203 0.029099033
## 57     57 0.02576843 0.3237412 0.02093037 0.0004011597 0.028995298
## 58     58 0.02576638 0.3238600 0.02092269 0.0003982231 0.028965912
## 59     59 0.02576147 0.3241184 0.02092162 0.0003950903 0.029309249
## 60     60 0.02576171 0.3241423 0.02092541 0.0003822488 0.028411270
## 61     61 0.02576373 0.3240125 0.02093164 0.0003806297 0.028563156
## 62     62 0.02575127 0.3246336 0.02092387 0.0003796099 0.028678675
## 63     63 0.02574833 0.3248042 0.02092242 0.0003777509 0.028353255
## 64     64 0.02575285 0.3246191 0.02092916 0.0003910038 0.029637641
## 65     65 0.02574928 0.3248212 0.02092490 0.0003940812 0.029752834
## 66     66 0.02574948 0.3248066 0.02092113 0.0003959405 0.029571006
## 67     67 0.02575997 0.3243013 0.02092304 0.0003962149 0.029604295
## 68     68 0.02575777 0.3244540 0.02092412 0.0003957959 0.029542555
## 69     69 0.02577021 0.3238959 0.02093629 0.0003867744 0.029210571
## 70     70 0.02577774 0.3235184 0.02094624 0.0003877042 0.029085700
## 71     71 0.02578621 0.3231470 0.02095196 0.0003973188 0.029657915
## 72     72 0.02578201 0.3233723 0.02095276 0.0004007668 0.029706986
## 73     73 0.02578222 0.3233926 0.02095567 0.0003953174 0.030054518
## 74     74 0.02578475 0.3232858 0.02096265 0.0004022207 0.030155786
## 75     75 0.02578500 0.3232705 0.02095910 0.0004130527 0.029913575
## 76     76 0.02578959 0.3230756 0.02096283 0.0004290472 0.029960329
## 77     77 0.02579353 0.3228797 0.02095985 0.0004385083 0.029183424
## 78     78 0.02579452 0.3228372 0.02095143 0.0004333894 0.028775028
## 79     79 0.02578247 0.3234615 0.02094447 0.0004324556 0.029258019
## 80     80 0.02578326 0.3234533 0.02094461 0.0004280296 0.029370888
## 81     81 0.02577872 0.3236438 0.02093410 0.0004285666 0.029400588
## 82     82 0.02577224 0.3239722 0.02092847 0.0004238233 0.028939123
## 83     83 0.02578485 0.3233782 0.02093667 0.0004394880 0.029623819
## 84     84 0.02578605 0.3233423 0.02093016 0.0004298172 0.029353337
## 85     85 0.02578387 0.3234667 0.02092953 0.0004223454 0.029568970
## 86     86 0.02577533 0.3239210 0.02091799 0.0004214527 0.029483778
## 87     87 0.02577418 0.3240023 0.02091349 0.0004249286 0.029568900
## 88     88 0.02576733 0.3243269 0.02091030 0.0004281580 0.030151231
## 89     89 0.02577132 0.3241496 0.02091718 0.0004302645 0.029742151
## 90     90 0.02578126 0.3237220 0.02091488 0.0004271793 0.029969503
## 91     91 0.02577170 0.3242407 0.02089862 0.0004242377 0.029920525
## 92     92 0.02577108 0.3242535 0.02090258 0.0004284533 0.030323680
## 93     93 0.02576606 0.3244994 0.02089528 0.0004177741 0.029902392
## 94     94 0.02576064 0.3247840 0.02089100 0.0004282358 0.030392596
## 95     95 0.02576030 0.3248198 0.02089289 0.0004261258 0.030784042
## 96     96 0.02576706 0.3245232 0.02089979 0.0004315427 0.030126942
## 97     97 0.02575946 0.3249239 0.02089098 0.0004293711 0.030247444
## 98     98 0.02576335 0.3247605 0.02089443 0.0004222494 0.030036539
## 99     99 0.02576245 0.3248318 0.02089463 0.0004323187 0.030187270
## 100   100 0.02575895 0.3250486 0.02088700 0.0004184895 0.029440926
## 101   101 0.02576756 0.3246094 0.02089685 0.0004055355 0.028891418
## 102   102 0.02576088 0.3249562 0.02089782 0.0003936277 0.029067826
## 103   103 0.02576469 0.3248075 0.02089792 0.0003905116 0.029229615
## 104   104 0.02576265 0.3248872 0.02089061 0.0003872241 0.029100932
## 105   105 0.02575632 0.3252308 0.02088496 0.0003985554 0.029075132
## 106   106 0.02576243 0.3249867 0.02088753 0.0003994407 0.028769289
## 107   107 0.02577202 0.3244973 0.02089630 0.0003971027 0.028754492
## 108   108 0.02577511 0.3243438 0.02089566 0.0004006568 0.029142310
## 109   109 0.02578423 0.3239126 0.02090303 0.0004011582 0.029123698
## 110   110 0.02578797 0.3237653 0.02090636 0.0004117695 0.029589453
## 111   111 0.02579413 0.3234687 0.02090759 0.0004154226 0.029772554
## 112   112 0.02579389 0.3234758 0.02090762 0.0004110475 0.030360489
## 113   113 0.02578970 0.3237140 0.02090485 0.0004056168 0.029892016
## 114   114 0.02578627 0.3239288 0.02090216 0.0004079745 0.030078732
## 115   115 0.02577623 0.3244244 0.02089170 0.0004016716 0.029739893
## 116   116 0.02577298 0.3245588 0.02088781 0.0004071699 0.029701799
## 117   117 0.02577212 0.3245534 0.02088619 0.0004135458 0.029689893
## 118   118 0.02578095 0.3241672 0.02089131 0.0004244559 0.030035486
## 119   119 0.02577909 0.3242807 0.02089353 0.0004229975 0.030014164
## 120   120 0.02578438 0.3240452 0.02090035 0.0004249084 0.029956521
## 121   121 0.02578694 0.3239085 0.02089975 0.0004231027 0.030179959
## 122   122 0.02578670 0.3239416 0.02090236 0.0004181276 0.030187505
## 123   123 0.02578624 0.3239829 0.02090213 0.0004268499 0.030380844
## 124   124 0.02578736 0.3239354 0.02090545 0.0004196824 0.029773759
## 125   125 0.02578892 0.3238752 0.02090384 0.0004214593 0.029734570
## 126   126 0.02578485 0.3240726 0.02090599 0.0004278206 0.029958398
## 127   127 0.02578731 0.3239652 0.02090829 0.0004264421 0.030013435
## 128   128 0.02578699 0.3240004 0.02090901 0.0004253291 0.029740400
## 129   129 0.02579703 0.3235161 0.02091489 0.0004274586 0.029793715
## 130   130 0.02579946 0.3234041 0.02091838 0.0004308651 0.029593629
## 131   131 0.02580711 0.3230464 0.02092155 0.0004335240 0.029638169
## 132   132 0.02581632 0.3226214 0.02093516 0.0004350097 0.029343198
## 133   133 0.02581651 0.3226030 0.02093531 0.0004329580 0.029603017
## 134   134 0.02582787 0.3220785 0.02095054 0.0004349988 0.029400132
## 135   135 0.02581622 0.3226303 0.02094100 0.0004364320 0.029469973
## 136   136 0.02581818 0.3225282 0.02094372 0.0004359750 0.029645150
## 137   137 0.02581849 0.3225477 0.02094446 0.0004279618 0.029350079
## 138   138 0.02581894 0.3225165 0.02094833 0.0004352944 0.029406912
## 139   139 0.02582291 0.3223366 0.02094991 0.0004345202 0.029518951
## 140   140 0.02582784 0.3221291 0.02095355 0.0004346397 0.029387949
## 141   141 0.02582456 0.3222838 0.02095284 0.0004258815 0.029270010
## 142   142 0.02582225 0.3224057 0.02095186 0.0004284649 0.029274052
## 143   143 0.02582309 0.3223543 0.02095310 0.0004287349 0.029220128
## 144   144 0.02582225 0.3223797 0.02095529 0.0004215722 0.029261707
## 145   145 0.02582691 0.3221704 0.02096207 0.0004230469 0.029268766
## 146   146 0.02583026 0.3220201 0.02096342 0.0004258150 0.029341765
## 147   147 0.02582797 0.3221198 0.02096107 0.0004310997 0.029292930
## 148   148 0.02582693 0.3221445 0.02095704 0.0004291024 0.029148788
## 149   149 0.02582626 0.3222081 0.02095531 0.0004239732 0.028773745
## 150   150 0.02582794 0.3221414 0.02095888 0.0004204259 0.028971750
## 151   151 0.02583544 0.3217868 0.02096419 0.0004262075 0.029053310
## 152   152 0.02583613 0.3217624 0.02096462 0.0004292530 0.029183252
## 153   153 0.02583315 0.3218916 0.02096171 0.0004313707 0.029150222
## 154   154 0.02583504 0.3218109 0.02095859 0.0004354176 0.029246470
## 155   155 0.02583535 0.3217834 0.02095833 0.0004314159 0.029327695
## 156   156 0.02584056 0.3215349 0.02095987 0.0004306751 0.029493767
## 157   157 0.02584092 0.3215332 0.02096042 0.0004251680 0.029450841
## 158   158 0.02583722 0.3217303 0.02095788 0.0004223559 0.029391066
## 159   159 0.02583382 0.3218985 0.02095357 0.0004203314 0.029368224
## 160   160 0.02583586 0.3217807 0.02095614 0.0004162100 0.029261609
## 161   161 0.02583649 0.3217476 0.02095726 0.0004200939 0.029523916
## 162   162 0.02583813 0.3216856 0.02095740 0.0004150509 0.029438052
## 163   163 0.02583757 0.3217174 0.02095783 0.0004186378 0.029537230
## 164   164 0.02583418 0.3219009 0.02095449 0.0004228816 0.029500374
## 165   165 0.02583438 0.3218970 0.02095379 0.0004292497 0.029456815
## 166   166 0.02583542 0.3218591 0.02095683 0.0004338172 0.029723492
## 167   167 0.02583976 0.3216559 0.02096111 0.0004304410 0.029683079
## 168   168 0.02583990 0.3216516 0.02096008 0.0004354814 0.029551009
## 169   169 0.02583906 0.3216986 0.02095947 0.0004327124 0.029640123
## 170   170 0.02584027 0.3216387 0.02096058 0.0004340969 0.029718601
## 171   171 0.02583865 0.3217165 0.02095664 0.0004378557 0.029682456
## 172   172 0.02584232 0.3215293 0.02096019 0.0004343517 0.029624376
## 173   173 0.02584459 0.3214345 0.02096066 0.0004395498 0.029785924
## 174   174 0.02584443 0.3214500 0.02095940 0.0004391585 0.029717392
## 175   175 0.02584644 0.3213551 0.02096332 0.0004373061 0.029572758
## 176   176 0.02584728 0.3213201 0.02096418 0.0004416956 0.029788118
## 177   177 0.02584833 0.3212774 0.02096414 0.0004391222 0.029935958
## 178   178 0.02585241 0.3211089 0.02096855 0.0004428738 0.030097579
## 179   179 0.02585531 0.3209693 0.02097192 0.0004393081 0.030101139
## 180   180 0.02585702 0.3209023 0.02097305 0.0004405582 0.030209078
## 181   181 0.02585503 0.3210090 0.02097077 0.0004418459 0.030305190
## 182   182 0.02585328 0.3210937 0.02096845 0.0004401805 0.030364939
## 183   183 0.02585139 0.3211792 0.02096554 0.0004420374 0.030665457
## 184   184 0.02585017 0.3212422 0.02096544 0.0004406943 0.030575109
## 185   185 0.02585254 0.3211347 0.02096763 0.0004405051 0.030535709
## 186   186 0.02585337 0.3211003 0.02096678 0.0004382738 0.030600690
## 187   187 0.02585631 0.3209542 0.02096932 0.0004368795 0.030537927
## 188   188 0.02585988 0.3207775 0.02097167 0.0004400894 0.030546950
## 189   189 0.02586043 0.3207536 0.02097262 0.0004411037 0.030475675
## 190   190 0.02586084 0.3207358 0.02097389 0.0004425094 0.030504878
## 191   191 0.02586115 0.3207313 0.02097412 0.0004431290 0.030757660
## 192   192 0.02586135 0.3207157 0.02097564 0.0004431924 0.030772092
## 193   193 0.02586189 0.3207005 0.02097602 0.0004432511 0.030717111
## 194   194 0.02586459 0.3205763 0.02097982 0.0004437553 0.030775415
## 195   195 0.02586672 0.3204750 0.02098236 0.0004441679 0.030816644
## 196   196 0.02586531 0.3205394 0.02098233 0.0004434874 0.030794152
## 197   197 0.02586632 0.3204862 0.02098407 0.0004450520 0.030742461
## 198   198 0.02586997 0.3203162 0.02098704 0.0004445636 0.030742951
## 199   199 0.02586886 0.3203752 0.02098622 0.0004447761 0.030784161
## 200   200 0.02586985 0.3203295 0.02098664 0.0004457366 0.030830012
## 201   201 0.02587203 0.3202206 0.02098863 0.0004457233 0.030855196
## 202   202 0.02587097 0.3202678 0.02098742 0.0004430724 0.030696536
## 203   203 0.02586993 0.3203167 0.02098681 0.0004425744 0.030706221
## 204   204 0.02587282 0.3201821 0.02098954 0.0004427214 0.030742919
## 205   205 0.02587229 0.3202035 0.02099027 0.0004410815 0.030701637
## 206   206 0.02587171 0.3202294 0.02098994 0.0004422303 0.030731374
## 207   207 0.02587274 0.3201816 0.02099160 0.0004421251 0.030746992
## 208   208 0.02587365 0.3201451 0.02099339 0.0004433312 0.030737510
## 209   209 0.02587225 0.3202083 0.02099101 0.0004442333 0.030774396
## 210   210 0.02587085 0.3202772 0.02099004 0.0004441504 0.030825935
## 211   211 0.02587112 0.3202638 0.02098996 0.0004432370 0.030842058
## 212   212 0.02586991 0.3203182 0.02098953 0.0004432351 0.030913517
## 213   213 0.02587091 0.3202636 0.02099087 0.0004426694 0.030921026
## 214   214 0.02587079 0.3202682 0.02099106 0.0004411963 0.030902796
## 215   215 0.02587085 0.3202661 0.02099071 0.0004411895 0.030885704
## 216   216 0.02587108 0.3202537 0.02099052 0.0004409955 0.030882069
## 217   217 0.02587122 0.3202477 0.02099038 0.0004416564 0.030875243
## 218   218 0.02587181 0.3202185 0.02099092 0.0004420340 0.030888877
## 219   219 0.02587280 0.3201744 0.02099174 0.0004414741 0.030875033
## 220   220 0.02587241 0.3201906 0.02099084 0.0004407007 0.030860639
## 221   221 0.02587277 0.3201727 0.02099126 0.0004399664 0.030855239
## 222   222 0.02587317 0.3201544 0.02099172 0.0004400663 0.030822937
## 223   223 0.02587364 0.3201282 0.02099264 0.0004394369 0.030788725
## 224   224 0.02587386 0.3201165 0.02099260 0.0004387836 0.030794969
## 225   225 0.02587445 0.3200890 0.02099293 0.0004386687 0.030798370
## 226   226 0.02587387 0.3201163 0.02099235 0.0004387038 0.030815747
## 227   227 0.02587338 0.3201375 0.02099237 0.0004389619 0.030846027
## 228   228 0.02587336 0.3201349 0.02099205 0.0004389470 0.030840055
## 229   229 0.02587397 0.3201082 0.02099267 0.0004392338 0.030844485
## 230   230 0.02587352 0.3201269 0.02099247 0.0004391111 0.030839317
## 231   231 0.02587391 0.3201081 0.02099281 0.0004392433 0.030837420
## 232   232 0.02587420 0.3200940 0.02099298 0.0004392698 0.030842236
## 233   233 0.02587411 0.3200966 0.02099304 0.0004393985 0.030858542
## 234   234 0.02587423 0.3200912 0.02099286 0.0004397376 0.030856570
## 235   235 0.02587422 0.3200920 0.02099292 0.0004395790 0.030850723
## 236   236 0.02587439 0.3200851 0.02099309 0.0004394687 0.030836930
## 237   237 0.02587438 0.3200854 0.02099307 0.0004394771 0.030839524
## 238   238 0.02587455 0.3200776 0.02099327 0.0004395125 0.030831645
## 239   239 0.02587451 0.3200794 0.02099325 0.0004394825 0.030826420
## 240   240 0.02587453 0.3200784 0.02099330 0.0004394797 0.030826938
##            MAESD
## 1   0.0005549779
## 2   0.0005096954
## 3   0.0004777971
## 4   0.0004916343
## 5   0.0004714907
## 6   0.0004350187
## 7   0.0004591533
## 8   0.0004763347
## 9   0.0005008133
## 10  0.0005344009
## 11  0.0005243454
## 12  0.0005135503
## 13  0.0004836637
## 14  0.0004700333
## 15  0.0004812190
## 16  0.0004640750
## 17  0.0004721411
## 18  0.0004858347
## 19  0.0004729299
## 20  0.0004641255
## 21  0.0004593089
## 22  0.0004507001
## 23  0.0004485767
## 24  0.0004363133
## 25  0.0004492138
## 26  0.0004631239
## 27  0.0004546356
## 28  0.0004566606
## 29  0.0004718960
## 30  0.0004634761
## 31  0.0004557104
## 32  0.0004722786
## 33  0.0004928964
## 34  0.0004904788
## 35  0.0004886185
## 36  0.0004970889
## 37  0.0005015072
## 38  0.0005305437
## 39  0.0005168950
## 40  0.0005329054
## 41  0.0005333761
## 42  0.0005382613
## 43  0.0005377856
## 44  0.0005483700
## 45  0.0005447344
## 46  0.0005303671
## 47  0.0005189933
## 48  0.0005157229
## 49  0.0005040698
## 50  0.0005212116
## 51  0.0005205147
## 52  0.0005258691
## 53  0.0005133906
## 54  0.0005072688
## 55  0.0004912600
## 56  0.0004851962
## 57  0.0004846383
## 58  0.0004828169
## 59  0.0004703161
## 60  0.0004563182
## 61  0.0004456206
## 62  0.0004505283
## 63  0.0004398010
## 64  0.0004462074
## 65  0.0004532676
## 66  0.0004623461
## 67  0.0004609277
## 68  0.0004568777
## 69  0.0004505716
## 70  0.0004524783
## 71  0.0004613728
## 72  0.0004695044
## 73  0.0004662184
## 74  0.0004769144
## 75  0.0004836196
## 76  0.0004910327
## 77  0.0005079410
## 78  0.0005029657
## 79  0.0005045272
## 80  0.0004991375
## 81  0.0005008533
## 82  0.0004961592
## 83  0.0005059900
## 84  0.0004922592
## 85  0.0004853814
## 86  0.0004795489
## 87  0.0004804587
## 88  0.0004886895
## 89  0.0004871631
## 90  0.0004861779
## 91  0.0004901194
## 92  0.0004915621
## 93  0.0004796259
## 94  0.0004855815
## 95  0.0004833962
## 96  0.0004794292
## 97  0.0004720930
## 98  0.0004667843
## 99  0.0004818083
## 100 0.0004690213
## 101 0.0004505235
## 102 0.0004480537
## 103 0.0004472925
## 104 0.0004424367
## 105 0.0004496796
## 106 0.0004426830
## 107 0.0004378387
## 108 0.0004387464
## 109 0.0004365471
## 110 0.0004412811
## 111 0.0004423049
## 112 0.0004476071
## 113 0.0004389761
## 114 0.0004399308
## 115 0.0004342739
## 116 0.0004400070
## 117 0.0004467156
## 118 0.0004544192
## 119 0.0004545011
## 120 0.0004585876
## 121 0.0004573325
## 122 0.0004487731
## 123 0.0004488171
## 124 0.0004393890
## 125 0.0004414202
## 126 0.0004509682
## 127 0.0004515285
## 128 0.0004478261
## 129 0.0004520312
## 130 0.0004506811
## 131 0.0004535289
## 132 0.0004568275
## 133 0.0004603788
## 134 0.0004550439
## 135 0.0004626615
## 136 0.0004636534
## 137 0.0004568884
## 138 0.0004632200
## 139 0.0004641812
## 140 0.0004602848
## 141 0.0004591625
## 142 0.0004574557
## 143 0.0004554347
## 144 0.0004478951
## 145 0.0004516291
## 146 0.0004508182
## 147 0.0004571509
## 148 0.0004553959
## 149 0.0004497215
## 150 0.0004450844
## 151 0.0004501114
## 152 0.0004551843
## 153 0.0004563961
## 154 0.0004600508
## 155 0.0004586903
## 156 0.0004604975
## 157 0.0004598109
## 158 0.0004562253
## 159 0.0004567402
## 160 0.0004533190
## 161 0.0004574246
## 162 0.0004572054
## 163 0.0004610068
## 164 0.0004656971
## 165 0.0004700375
## 166 0.0004750131
## 167 0.0004733808
## 168 0.0004748677
## 169 0.0004711920
## 170 0.0004738266
## 171 0.0004763985
## 172 0.0004747545
## 173 0.0004780649
## 174 0.0004775324
## 175 0.0004754924
## 176 0.0004799754
## 177 0.0004809116
## 178 0.0004844787
## 179 0.0004803685
## 180 0.0004831524
## 181 0.0004837986
## 182 0.0004846183
## 183 0.0004885441
## 184 0.0004853670
## 185 0.0004856915
## 186 0.0004845320
## 187 0.0004844473
## 188 0.0004866181
## 189 0.0004887452
## 190 0.0004913864
## 191 0.0004937543
## 192 0.0004932998
## 193 0.0004916191
## 194 0.0004922082
## 195 0.0004929967
## 196 0.0004919815
## 197 0.0004923373
## 198 0.0004906948
## 199 0.0004905121
## 200 0.0004912936
## 201 0.0004910381
## 202 0.0004886099
## 203 0.0004893262
## 204 0.0004890910
## 205 0.0004890457
## 206 0.0004890933
## 207 0.0004890946
## 208 0.0004895490
## 209 0.0004907959
## 210 0.0004912223
## 211 0.0004910423
## 212 0.0004907307
## 213 0.0004912370
## 214 0.0004909228
## 215 0.0004904021
## 216 0.0004909617
## 217 0.0004908784
## 218 0.0004910075
## 219 0.0004905535
## 220 0.0004899024
## 221 0.0004896586
## 222 0.0004891934
## 223 0.0004885874
## 224 0.0004880087
## 225 0.0004879420
## 226 0.0004879251
## 227 0.0004884753
## 228 0.0004886225
## 229 0.0004888505
## 230 0.0004888986
## 231 0.0004891834
## 232 0.0004890514
## 233 0.0004893139
## 234 0.0004896283
## 235 0.0004894899
## 236 0.0004893882
## 237 0.0004895430
## 238 0.0004895299
## 239 0.0004895184
## 240 0.0004895355
##    nvmax
## 37    37

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

##                  Estimate         2.5 %        97.5 %
## (Intercept)  1.959142e+00  1.945831e+00  1.972453e+00
## x4          -5.894348e-05 -7.322863e-05 -4.465833e-05
## x7           1.211026e-02  1.109894e-02  1.312159e-02
## x8           5.462716e-04  3.115475e-04  7.809957e-04
## x9           3.648840e-03  3.124097e-03  4.173583e-03
## x10          1.345163e-03  8.551357e-04  1.835190e-03
## x11          2.162103e+05  9.916029e+04  3.332603e+05
## x16          1.005084e-03  6.661899e-04  1.343978e-03
## x17          1.482193e-03  9.672475e-04  1.997139e-03
## x21          1.385246e-04  7.124036e-05  2.058087e-04
## stat3        5.034393e-04  1.083744e-04  8.985042e-04
## stat4       -5.606954e-04 -9.559051e-04 -1.654857e-04
## stat5       -4.413992e-04 -8.351587e-04 -4.763964e-05
## stat13      -4.578341e-04 -8.487487e-04 -6.691953e-05
## stat14      -1.099388e-03 -1.489315e-03 -7.094609e-04
## stat23       5.162950e-04  1.243124e-04  9.082775e-04
## stat24      -5.911580e-04 -9.842787e-04 -1.980373e-04
## stat25      -5.357640e-04 -9.276241e-04 -1.439038e-04
## stat26      -6.406763e-04 -1.033769e-03 -2.475836e-04
## stat38       5.722989e-04  1.808493e-04  9.637485e-04
## stat41      -5.056181e-04 -8.942051e-04 -1.170312e-04
## stat86       5.229102e-04  1.304384e-04  9.153820e-04
## stat92      -4.321695e-04 -8.264848e-04 -3.785418e-05
## stat98       3.346626e-03  2.960682e-03  3.732570e-03
## stat99       5.404169e-04  1.441039e-04  9.367298e-04
## stat100      6.042630e-04  2.132818e-04  9.952441e-04
## stat110     -3.145933e-03 -3.538213e-03 -2.753653e-03
## stat128     -4.711014e-04 -8.623582e-04 -7.984453e-05
## stat144      5.103416e-04  1.194153e-04  9.012679e-04
## stat146     -7.190819e-04 -1.113858e-03 -3.243059e-04
## stat149     -6.227483e-04 -1.021557e-03 -2.239393e-04
## stat156      5.661773e-04  1.702279e-04  9.621268e-04
## stat172      5.373333e-04  1.450602e-04  9.296065e-04
## stat184      4.168218e-04  2.233214e-05  8.113115e-04
## stat207      5.743580e-04  1.814944e-04  9.672215e-04
## stat210     -4.508046e-04 -8.453578e-04 -5.625139e-05
## stat217      5.188598e-04  1.248745e-04  9.128452e-04
## sqrt.x18     2.651474e-02  2.501758e-02  2.801189e-02

Test

# Evaluate the caret forward-selection (leapForward) model on the held-out
# test set. Relies on objects created earlier in the report: model.forward
# and id (from the caret training step), plus formula, feature.names,
# label.names, data.test, and the transformation flag t.
# Idiom fix: `algo.forward.caret` is already logical, so comparing it with
# `== TRUE` is redundant.
if (algo.forward.caret) {
  test.model(model.forward, data.test
             ,method = 'leapForward', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.038   2.080   2.093   2.093   2.106   2.151 
## [1] "leapForward  Test MSE: 0.000996440427511273"

Backward Elimination

Train

# Backward elimination: AIC-based stepwise removal starting from the full
# model (model.full). Prints the selected model's summary, the elapsed
# time, and diagnostic plots.
if (algo.backward) {
  # NOTE(review): step() over all predictors can be very slow on wide data.
  t1 <- Sys.time()

  model.backward <- step(model.full, data = data.train, direction = "backward", trace = 0)
  print(summary(model.backward))

  t2 <- Sys.time()
  # Fix: a bare `t2 - t1` inside paste() drops the difftime's units
  # (Sys.time() differences auto-pick secs/mins/hours), so the logged
  # number was ambiguous. format() keeps the unit in the message.
  print(paste("Time taken for Backward Elimination: ", format(difftime(t2, t1)), sep = ""))

  plot.diagnostics(model.backward, data.train)
}

Test

# Evaluate the backward-elimination model on the test set.
# Bug fix: the original called test.model(model.backard, ...) — a
# misspelling of `model.backward` — which would raise
# "object 'model.backard' not found" whenever this branch ran.
if (algo.backward) {
  test.model(model.backward, data.test, "Backward Elimination")
}

Backward Elimination with CV (w/ full train)

Train

# Train backward selection (leapBackward) with cross-validation through the
# project's caret wrapper, then unpack the fitted model and resampling index
# into the globals (model.backward, id) that the later test step reads.
if (algo.backward.caret) {
  set.seed(1)  # reproducible CV folds
  fit <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapBackward",
    feature.names = feature.names
  )
  model.backward <- fit$model
  id <- fit$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 12 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.03426821 0.1094342 0.02668433 0.0007671751 0.01830599
## 2       2 0.03341813 0.1535728 0.02594003 0.0008198294 0.02639043
## 3       3 0.03287257 0.1816561 0.02541294 0.0008503685 0.03153037
## 4       4 0.03239628 0.2054364 0.02473878 0.0008624627 0.03152274
## 5       5 0.03201215 0.2239753 0.02445310 0.0008662346 0.02725898
## 6       6 0.03191524 0.2285619 0.02438627 0.0008360375 0.02470748
## 7       7 0.03191420 0.2285037 0.02442852 0.0007968190 0.02338661
## 8       8 0.03185561 0.2313736 0.02438606 0.0007411257 0.02370581
## 9       9 0.03183263 0.2324701 0.02435534 0.0007863111 0.02567275
## 10     10 0.03180391 0.2338324 0.02433061 0.0007389905 0.02451299
## 11     11 0.03180100 0.2340895 0.02433364 0.0007557523 0.02594161
## 12     12 0.03177341 0.2353893 0.02431505 0.0007382138 0.02432139
## 13     13 0.03177927 0.2351046 0.02431925 0.0007220642 0.02438588
## 14     14 0.03178458 0.2348190 0.02433793 0.0006950445 0.02268287
## 15     15 0.03180628 0.2337481 0.02435280 0.0006775356 0.02121300
## 16     16 0.03182834 0.2327028 0.02437532 0.0006658929 0.02087200
## 17     17 0.03183880 0.2322673 0.02438065 0.0006477234 0.02002256
## 18     18 0.03185728 0.2313949 0.02439923 0.0006370540 0.01992706
## 19     19 0.03185190 0.2316933 0.02442363 0.0006572487 0.02061177
## 20     20 0.03186365 0.2312027 0.02442326 0.0006482482 0.02010643
## 21     21 0.03188870 0.2300331 0.02445038 0.0006641959 0.01951572
## 22     22 0.03192348 0.2284313 0.02446307 0.0006696603 0.01875481
## 23     23 0.03191531 0.2288032 0.02445972 0.0006896337 0.01921733
## 24     24 0.03192927 0.2282128 0.02447226 0.0006939932 0.01957920
## 25     25 0.03194151 0.2277033 0.02447714 0.0006893063 0.01969454
## 26     26 0.03194479 0.2276271 0.02448632 0.0006878570 0.01936287
## 27     27 0.03192728 0.2284407 0.02447571 0.0006824068 0.01947629
## 28     28 0.03190266 0.2296285 0.02446193 0.0006900168 0.01998285
## 29     29 0.03190144 0.2297153 0.02446704 0.0006683478 0.02026791
## 30     30 0.03190411 0.2295599 0.02447441 0.0006612030 0.02024875
## 31     31 0.03192245 0.2287775 0.02449775 0.0006419751 0.01990136
## 32     32 0.03190411 0.2295923 0.02447790 0.0006353917 0.01967054
## 33     33 0.03191769 0.2290308 0.02448798 0.0006330710 0.02078936
## 34     34 0.03192997 0.2284652 0.02449369 0.0006250619 0.02014378
## 35     35 0.03193159 0.2284100 0.02449306 0.0006337653 0.01961509
## 36     36 0.03194318 0.2279025 0.02450866 0.0006278046 0.01966998
## 37     37 0.03194128 0.2280250 0.02450292 0.0006062094 0.01949993
## 38     38 0.03193948 0.2281609 0.02449793 0.0006112787 0.01995649
## 39     39 0.03195435 0.2275080 0.02449846 0.0006033841 0.01982602
## 40     40 0.03197714 0.2265101 0.02450479 0.0006112463 0.01960349
## 41     41 0.03197246 0.2267294 0.02450782 0.0006119962 0.01939374
## 42     42 0.03198255 0.2263004 0.02450682 0.0006238639 0.01950603
## 43     43 0.03199114 0.2258997 0.02451161 0.0006060188 0.01947396
## 44     44 0.03200028 0.2255004 0.02451714 0.0005998673 0.01912384
## 45     45 0.03200645 0.2252130 0.02452023 0.0006270249 0.01879333
## 46     46 0.03200449 0.2253257 0.02452497 0.0006339536 0.01855949
## 47     47 0.03200721 0.2252633 0.02453224 0.0006260072 0.01800776
## 48     48 0.03200590 0.2253953 0.02453203 0.0006209163 0.01827852
## 49     49 0.03202952 0.2243466 0.02455568 0.0006211575 0.01776978
## 50     50 0.03204547 0.2236881 0.02457129 0.0006238105 0.01796367
## 51     51 0.03205544 0.2232613 0.02457713 0.0006259454 0.01812826
## 52     52 0.03206918 0.2226891 0.02458787 0.0006315424 0.01814300
## 53     53 0.03208895 0.2217951 0.02460633 0.0006395138 0.01786279
## 54     54 0.03210303 0.2211342 0.02461607 0.0006433302 0.01792698
## 55     55 0.03210594 0.2210422 0.02461954 0.0006460143 0.01782366
## 56     56 0.03211827 0.2204896 0.02463065 0.0006405197 0.01792998
## 57     57 0.03212192 0.2203932 0.02463209 0.0006460350 0.01810381
## 58     58 0.03211789 0.2205931 0.02462150 0.0006482055 0.01856998
## 59     59 0.03212504 0.2203253 0.02462912 0.0006357266 0.01874442
## 60     60 0.03213963 0.2196718 0.02464398 0.0006303290 0.01829393
## 61     61 0.03214514 0.2194668 0.02464477 0.0006266548 0.01846410
## 62     62 0.03215012 0.2192727 0.02465223 0.0006284261 0.01873517
## 63     63 0.03216315 0.2186899 0.02466821 0.0006260267 0.01897050
## 64     64 0.03215916 0.2188844 0.02466642 0.0006315881 0.01853513
## 65     65 0.03215913 0.2189223 0.02466479 0.0006435623 0.01908672
## 66     66 0.03217069 0.2184659 0.02468194 0.0006537632 0.01921302
## 67     67 0.03217246 0.2183633 0.02468962 0.0006464014 0.01884610
## 68     68 0.03218167 0.2179864 0.02469864 0.0006563836 0.01883415
## 69     69 0.03217754 0.2182181 0.02469412 0.0006534367 0.01873611
## 70     70 0.03217715 0.2182067 0.02469706 0.0006534379 0.01838901
## 71     71 0.03217845 0.2181868 0.02469606 0.0006481896 0.01800013
## 72     72 0.03218175 0.2180792 0.02469187 0.0006613731 0.01804296
## 73     73 0.03218922 0.2177544 0.02470081 0.0006561293 0.01776743
## 74     74 0.03219805 0.2173892 0.02470531 0.0006537983 0.01769752
## 75     75 0.03220056 0.2172983 0.02470376 0.0006546933 0.01750838
## 76     76 0.03219960 0.2173790 0.02470430 0.0006420863 0.01745282
## 77     77 0.03221076 0.2169579 0.02471522 0.0006420545 0.01744785
## 78     78 0.03221259 0.2169375 0.02471277 0.0006363800 0.01756123
## 79     79 0.03221229 0.2169183 0.02471441 0.0006330709 0.01719641
## 80     80 0.03221515 0.2168057 0.02470895 0.0006377304 0.01731052
## 81     81 0.03222343 0.2164590 0.02471839 0.0006460268 0.01747258
## 82     82 0.03222760 0.2163016 0.02472074 0.0006426179 0.01764961
## 83     83 0.03222324 0.2165096 0.02471669 0.0006358390 0.01767193
## 84     84 0.03222876 0.2162637 0.02471719 0.0006322749 0.01742355
## 85     85 0.03224167 0.2157128 0.02473131 0.0006269015 0.01703388
## 86     86 0.03224373 0.2156846 0.02472458 0.0006219480 0.01741569
## 87     87 0.03223786 0.2159581 0.02472109 0.0006247951 0.01723040
## 88     88 0.03223807 0.2159160 0.02472269 0.0006223346 0.01691473
## 89     89 0.03223689 0.2159942 0.02472481 0.0006250902 0.01666804
## 90     90 0.03223717 0.2160078 0.02473126 0.0006305339 0.01670149
## 91     91 0.03223474 0.2161405 0.02472957 0.0006362253 0.01651689
## 92     92 0.03223465 0.2161808 0.02473202 0.0006355866 0.01650353
## 93     93 0.03223231 0.2162824 0.02473141 0.0006337980 0.01641068
## 94     94 0.03224504 0.2157520 0.02473673 0.0006432263 0.01652794
## 95     95 0.03224826 0.2156342 0.02473576 0.0006417666 0.01652434
## 96     96 0.03225596 0.2153272 0.02474437 0.0006368149 0.01629715
## 97     97 0.03225665 0.2153103 0.02473522 0.0006450020 0.01630168
## 98     98 0.03225906 0.2152390 0.02473918 0.0006500772 0.01635837
## 99     99 0.03225801 0.2152988 0.02474020 0.0006485783 0.01629891
## 100   100 0.03225288 0.2155203 0.02473668 0.0006454845 0.01616930
## 101   101 0.03225868 0.2152880 0.02473954 0.0006484554 0.01636627
## 102   102 0.03225827 0.2152536 0.02473717 0.0006469643 0.01591422
## 103   103 0.03225500 0.2153884 0.02473211 0.0006458809 0.01619118
## 104   104 0.03226720 0.2149026 0.02474307 0.0006371186 0.01647627
## 105   105 0.03226544 0.2150249 0.02474696 0.0006460556 0.01682609
## 106   106 0.03226473 0.2150569 0.02474646 0.0006473876 0.01664651
## 107   107 0.03225565 0.2154736 0.02474042 0.0006540488 0.01698026
## 108   108 0.03225643 0.2155123 0.02473742 0.0006587749 0.01735725
## 109   109 0.03225798 0.2154642 0.02473926 0.0006590486 0.01752157
## 110   110 0.03226389 0.2151772 0.02474425 0.0006612289 0.01716733
## 111   111 0.03226534 0.2151385 0.02474669 0.0006590401 0.01764085
## 112   112 0.03227139 0.2148855 0.02475089 0.0006572880 0.01736114
## 113   113 0.03227536 0.2146966 0.02475367 0.0006587309 0.01695699
## 114   114 0.03227515 0.2146750 0.02475800 0.0006601393 0.01709099
## 115   115 0.03227548 0.2146982 0.02475842 0.0006631260 0.01722461
## 116   116 0.03227068 0.2149312 0.02476080 0.0006683730 0.01712393
## 117   117 0.03227102 0.2149509 0.02475818 0.0006655914 0.01723389
## 118   118 0.03226799 0.2151188 0.02475254 0.0006662985 0.01768271
## 119   119 0.03226974 0.2150519 0.02475581 0.0006630865 0.01758014
## 120   120 0.03227558 0.2147942 0.02475841 0.0006591192 0.01753872
## 121   121 0.03227652 0.2147575 0.02475735 0.0006619917 0.01768025
## 122   122 0.03227784 0.2147515 0.02476139 0.0006563096 0.01775062
## 123   123 0.03228361 0.2145059 0.02476546 0.0006677366 0.01816368
## 124   124 0.03228093 0.2146267 0.02476110 0.0006719781 0.01806851
## 125   125 0.03228636 0.2143991 0.02476602 0.0006790922 0.01839222
## 126   126 0.03229100 0.2141806 0.02477348 0.0006795299 0.01802906
## 127   127 0.03229601 0.2139592 0.02477433 0.0006806885 0.01803110
## 128   128 0.03230330 0.2136487 0.02478026 0.0006745428 0.01780742
## 129   129 0.03230390 0.2136115 0.02477806 0.0006695738 0.01770701
## 130   130 0.03230538 0.2135557 0.02477227 0.0006742962 0.01752051
## 131   131 0.03231301 0.2132563 0.02477691 0.0006723194 0.01748642
## 132   132 0.03231515 0.2131570 0.02477686 0.0006690248 0.01743079
## 133   133 0.03231843 0.2130271 0.02477798 0.0006697817 0.01751291
## 134   134 0.03232184 0.2128710 0.02478152 0.0006792375 0.01772989
## 135   135 0.03232410 0.2127933 0.02478475 0.0006782175 0.01805079
## 136   136 0.03232295 0.2128587 0.02478714 0.0006731029 0.01784783
## 137   137 0.03232279 0.2128908 0.02478568 0.0006807306 0.01791729
## 138   138 0.03232436 0.2128190 0.02478433 0.0006730078 0.01762938
## 139   139 0.03232519 0.2128030 0.02478738 0.0006709642 0.01775153
## 140   140 0.03232929 0.2126279 0.02479260 0.0006677773 0.01760076
## 141   141 0.03233320 0.2124836 0.02479889 0.0006668629 0.01780016
## 142   142 0.03233781 0.2123166 0.02480154 0.0006696850 0.01799406
## 143   143 0.03233998 0.2122240 0.02480142 0.0006658908 0.01794176
## 144   144 0.03234016 0.2122311 0.02480231 0.0006594374 0.01771390
## 145   145 0.03234141 0.2121824 0.02480182 0.0006631918 0.01791001
## 146   146 0.03233897 0.2122757 0.02479824 0.0006598533 0.01794154
## 147   147 0.03234519 0.2120168 0.02480364 0.0006542956 0.01785461
## 148   148 0.03234791 0.2119080 0.02480639 0.0006601656 0.01804786
## 149   149 0.03234977 0.2118486 0.02481176 0.0006573753 0.01812321
## 150   150 0.03235111 0.2117891 0.02481214 0.0006530542 0.01790175
## 151   151 0.03234832 0.2119088 0.02481160 0.0006520824 0.01766357
## 152   152 0.03235044 0.2118322 0.02481270 0.0006516064 0.01766578
## 153   153 0.03235227 0.2117640 0.02481414 0.0006461280 0.01756425
## 154   154 0.03235214 0.2117638 0.02481339 0.0006423693 0.01729767
## 155   155 0.03235460 0.2116566 0.02481464 0.0006443961 0.01742379
## 156   156 0.03235972 0.2114479 0.02481554 0.0006437772 0.01742887
## 157   157 0.03235838 0.2115184 0.02481226 0.0006410204 0.01734859
## 158   158 0.03235595 0.2116012 0.02480886 0.0006419238 0.01746059
## 159   159 0.03235487 0.2116210 0.02480730 0.0006429135 0.01723748
## 160   160 0.03235534 0.2116028 0.02480735 0.0006458017 0.01729672
## 161   161 0.03235570 0.2116011 0.02480748 0.0006428189 0.01712148
## 162   162 0.03235687 0.2115576 0.02480958 0.0006435010 0.01721393
## 163   163 0.03235900 0.2114603 0.02480906 0.0006429182 0.01709478
## 164   164 0.03236216 0.2113444 0.02481288 0.0006441590 0.01729570
## 165   165 0.03236091 0.2114163 0.02481053 0.0006366688 0.01731957
## 166   166 0.03236621 0.2111824 0.02481367 0.0006330920 0.01717849
## 167   167 0.03236727 0.2111255 0.02481650 0.0006344936 0.01720469
## 168   168 0.03236688 0.2111326 0.02481591 0.0006347949 0.01712746
## 169   169 0.03236699 0.2111183 0.02481761 0.0006354990 0.01713015
## 170   170 0.03236749 0.2111106 0.02481970 0.0006337320 0.01702172
## 171   171 0.03236936 0.2110378 0.02481802 0.0006364032 0.01697256
## 172   172 0.03236758 0.2111291 0.02481698 0.0006349982 0.01716994
## 173   173 0.03236904 0.2110815 0.02481690 0.0006312891 0.01720038
## 174   174 0.03237252 0.2109301 0.02481773 0.0006298071 0.01715220
## 175   175 0.03237751 0.2107119 0.02482002 0.0006331928 0.01723051
## 176   176 0.03237661 0.2107498 0.02481853 0.0006366684 0.01730502
## 177   177 0.03237703 0.2107364 0.02481799 0.0006389530 0.01737867
## 178   178 0.03237144 0.2109774 0.02481275 0.0006429214 0.01745663
## 179   179 0.03237052 0.2110118 0.02481390 0.0006446614 0.01752414
## 180   180 0.03237269 0.2109186 0.02481516 0.0006439697 0.01733143
## 181   181 0.03237371 0.2108729 0.02481463 0.0006468049 0.01737371
## 182   182 0.03237232 0.2109461 0.02481708 0.0006474986 0.01741364
## 183   183 0.03237082 0.2110170 0.02481548 0.0006495061 0.01758008
## 184   184 0.03237034 0.2110277 0.02481466 0.0006495325 0.01747987
## 185   185 0.03237032 0.2110391 0.02481405 0.0006458812 0.01743651
## 186   186 0.03237215 0.2109812 0.02481510 0.0006456283 0.01751284
## 187   187 0.03237146 0.2110177 0.02481349 0.0006401179 0.01738783
## 188   188 0.03236962 0.2110972 0.02481088 0.0006430461 0.01753206
## 189   189 0.03236659 0.2112221 0.02480894 0.0006418251 0.01740522
## 190   190 0.03236602 0.2112435 0.02480716 0.0006424705 0.01733675
## 191   191 0.03236795 0.2111648 0.02480921 0.0006436882 0.01736737
## 192   192 0.03236698 0.2112050 0.02480889 0.0006421717 0.01731728
## 193   193 0.03236698 0.2112192 0.02480998 0.0006384945 0.01731983
## 194   194 0.03236981 0.2110987 0.02481343 0.0006374238 0.01726627
## 195   195 0.03237122 0.2110417 0.02481500 0.0006379435 0.01728572
## 196   196 0.03237057 0.2110719 0.02481378 0.0006367568 0.01717570
## 197   197 0.03237101 0.2110479 0.02481387 0.0006383729 0.01712957
## 198   198 0.03237380 0.2109224 0.02481574 0.0006370567 0.01709733
## 199   199 0.03237346 0.2109479 0.02481436 0.0006371860 0.01703037
## 200   200 0.03237527 0.2108641 0.02481500 0.0006361776 0.01695425
## 201   201 0.03237643 0.2108167 0.02481498 0.0006373254 0.01695804
## 202   202 0.03237897 0.2106975 0.02481720 0.0006357683 0.01697012
## 203   203 0.03237886 0.2107060 0.02481771 0.0006335500 0.01694439
## 204   204 0.03237992 0.2106613 0.02481818 0.0006322319 0.01687075
## 205   205 0.03238026 0.2106487 0.02481779 0.0006332343 0.01683906
## 206   206 0.03237927 0.2106862 0.02481794 0.0006305485 0.01680520
## 207   207 0.03238009 0.2106523 0.02481809 0.0006302759 0.01682406
## 208   208 0.03238139 0.2105946 0.02481679 0.0006289167 0.01674445
## 209   209 0.03238385 0.2104831 0.02481849 0.0006286488 0.01669587
## 210   210 0.03238507 0.2104341 0.02482019 0.0006281745 0.01672407
## 211   211 0.03238560 0.2104239 0.02482110 0.0006274955 0.01674688
## 212   212 0.03238554 0.2104239 0.02482172 0.0006280513 0.01674109
## 213   213 0.03238479 0.2104645 0.02482073 0.0006279515 0.01686254
## 214   214 0.03238646 0.2103951 0.02482216 0.0006281572 0.01692305
## 215   215 0.03238514 0.2104512 0.02482065 0.0006274765 0.01689101
## 216   216 0.03238502 0.2104545 0.02482130 0.0006277536 0.01693075
## 217   217 0.03238477 0.2104636 0.02482091 0.0006284517 0.01689740
## 218   218 0.03238413 0.2104884 0.02482119 0.0006274836 0.01688905
## 219   219 0.03238395 0.2104972 0.02482028 0.0006266559 0.01690721
## 220   220 0.03238340 0.2105195 0.02482016 0.0006262486 0.01689334
## 221   221 0.03238413 0.2104852 0.02482106 0.0006259553 0.01692561
## 222   222 0.03238414 0.2104896 0.02482182 0.0006265314 0.01693600
## 223   223 0.03238496 0.2104543 0.02482251 0.0006288791 0.01697603
## 224   224 0.03238474 0.2104625 0.02482211 0.0006296008 0.01699212
## 225   225 0.03238457 0.2104718 0.02482250 0.0006297176 0.01699121
## 226   226 0.03238509 0.2104504 0.02482281 0.0006300274 0.01699901
## 227   227 0.03238365 0.2105133 0.02482164 0.0006296393 0.01701502
## 228   228 0.03238433 0.2104831 0.02482198 0.0006306961 0.01704026
## 229   229 0.03238361 0.2105155 0.02482148 0.0006313660 0.01708232
## 230   230 0.03238380 0.2105102 0.02482165 0.0006315468 0.01710672
## 231   231 0.03238317 0.2105367 0.02482155 0.0006313958 0.01711003
## 232   232 0.03238338 0.2105303 0.02482165 0.0006311156 0.01712688
## 233   233 0.03238360 0.2105224 0.02482189 0.0006309780 0.01712101
## 234   234 0.03238380 0.2105145 0.02482210 0.0006309593 0.01712433
## 235   235 0.03238392 0.2105097 0.02482210 0.0006306562 0.01712586
## 236   236 0.03238387 0.2105111 0.02482191 0.0006310423 0.01713022
## 237   237 0.03238369 0.2105198 0.02482176 0.0006305821 0.01712627
## 238   238 0.03238353 0.2105262 0.02482167 0.0006304126 0.01712057
## 239   239 0.03238357 0.2105246 0.02482160 0.0006303685 0.01712324
## 240   240 0.03238358 0.2105244 0.02482162 0.0006304950 0.01712347
##            MAESD
## 1   0.0005035442
## 2   0.0005078511
## 3   0.0006889891
## 4   0.0007548738
## 5   0.0007385957
## 6   0.0006920028
## 7   0.0006664314
## 8   0.0006186643
## 9   0.0006469633
## 10  0.0005912182
## 11  0.0006289068
## 12  0.0006182943
## 13  0.0006177591
## 14  0.0006042341
## 15  0.0005919262
## 16  0.0005803183
## 17  0.0005660014
## 18  0.0005649196
## 19  0.0005792929
## 20  0.0005805062
## 21  0.0005754812
## 22  0.0005857396
## 23  0.0005849419
## 24  0.0005857697
## 25  0.0005893670
## 26  0.0005746968
## 27  0.0005695664
## 28  0.0005748234
## 29  0.0005782365
## 30  0.0005812123
## 31  0.0005729425
## 32  0.0005560768
## 33  0.0005435714
## 34  0.0005268509
## 35  0.0005258267
## 36  0.0005372906
## 37  0.0005099928
## 38  0.0005273353
## 39  0.0005317214
## 40  0.0005310253
## 41  0.0005202883
## 42  0.0005281809
## 43  0.0005092409
## 44  0.0005014030
## 45  0.0005063414
## 46  0.0005154008
## 47  0.0005060265
## 48  0.0005160826
## 49  0.0005133404
## 50  0.0005056091
## 51  0.0005114902
## 52  0.0005045681
## 53  0.0005125909
## 54  0.0005120029
## 55  0.0005179201
## 56  0.0005190188
## 57  0.0005294137
## 58  0.0005287102
## 59  0.0005292448
## 60  0.0005275419
## 61  0.0005293760
## 62  0.0005294694
## 63  0.0005207046
## 64  0.0005077482
## 65  0.0005097810
## 66  0.0005172141
## 67  0.0005062920
## 68  0.0005158656
## 69  0.0005098187
## 70  0.0005104547
## 71  0.0005123847
## 72  0.0005150819
## 73  0.0005171535
## 74  0.0005116899
## 75  0.0005050942
## 76  0.0005005402
## 77  0.0005096489
## 78  0.0005111202
## 79  0.0004930274
## 80  0.0004979179
## 81  0.0005102649
## 82  0.0005099546
## 83  0.0005106727
## 84  0.0005047263
## 85  0.0004958115
## 86  0.0004866092
## 87  0.0004819546
## 88  0.0004838285
## 89  0.0004816415
## 90  0.0004870674
## 91  0.0004976717
## 92  0.0005008782
## 93  0.0005005591
## 94  0.0005075492
## 95  0.0005143739
## 96  0.0005087177
## 97  0.0005177536
## 98  0.0005307926
## 99  0.0005254822
## 100 0.0005208899
## 101 0.0005293344
## 102 0.0005303183
## 103 0.0005298093
## 104 0.0005259683
## 105 0.0005296589
## 106 0.0005289106
## 107 0.0005347681
## 108 0.0005378062
## 109 0.0005363936
## 110 0.0005311047
## 111 0.0005272277
## 112 0.0005300639
## 113 0.0005255277
## 114 0.0005219707
## 115 0.0005161007
## 116 0.0005192677
## 117 0.0005129045
## 118 0.0005121230
## 119 0.0005111384
## 120 0.0005145573
## 121 0.0005182254
## 122 0.0005165414
## 123 0.0005197525
## 124 0.0005203808
## 125 0.0005215439
## 126 0.0005215086
## 127 0.0005249020
## 128 0.0005203484
## 129 0.0005161997
## 130 0.0005217527
## 131 0.0005219166
## 132 0.0005247300
## 133 0.0005224166
## 134 0.0005253501
## 135 0.0005267184
## 136 0.0005232712
## 137 0.0005227132
## 138 0.0005180638
## 139 0.0005169164
## 140 0.0005125700
## 141 0.0005150180
## 142 0.0005193878
## 143 0.0005198928
## 144 0.0005150049
## 145 0.0005183667
## 146 0.0005200825
## 147 0.0005154546
## 148 0.0005171299
## 149 0.0005150451
## 150 0.0005145228
## 151 0.0005142472
## 152 0.0005116688
## 153 0.0005092936
## 154 0.0005035337
## 155 0.0004969038
## 156 0.0004963885
## 157 0.0004895049
## 158 0.0004905749
## 159 0.0004890772
## 160 0.0004908509
## 161 0.0004879461
## 162 0.0004921944
## 163 0.0004924436
## 164 0.0004978191
## 165 0.0004875441
## 166 0.0004830530
## 167 0.0004811613
## 168 0.0004786970
## 169 0.0004799493
## 170 0.0004770444
## 171 0.0004772587
## 172 0.0004778718
## 173 0.0004779222
## 174 0.0004752994
## 175 0.0004796504
## 176 0.0004831320
## 177 0.0004839843
## 178 0.0004872993
## 179 0.0004912828
## 180 0.0004915438
## 181 0.0004940018
## 182 0.0004960918
## 183 0.0004982899
## 184 0.0004972837
## 185 0.0004966243
## 186 0.0004964395
## 187 0.0004958306
## 188 0.0004947568
## 189 0.0004914570
## 190 0.0004924735
## 191 0.0004930920
## 192 0.0004939316
## 193 0.0004917803
## 194 0.0004914580
## 195 0.0004924554
## 196 0.0004919782
## 197 0.0004933488
## 198 0.0004908157
## 199 0.0004900275
## 200 0.0004890844
## 201 0.0004901487
## 202 0.0004911289
## 203 0.0004883890
## 204 0.0004879717
## 205 0.0004878651
## 206 0.0004856810
## 207 0.0004857460
## 208 0.0004844016
## 209 0.0004844103
## 210 0.0004836239
## 211 0.0004838267
## 212 0.0004858542
## 213 0.0004859223
## 214 0.0004853990
## 215 0.0004853695
## 216 0.0004852859
## 217 0.0004849754
## 218 0.0004839267
## 219 0.0004843070
## 220 0.0004845801
## 221 0.0004840746
## 222 0.0004842547
## 223 0.0004852225
## 224 0.0004856397
## 225 0.0004852937
## 226 0.0004857069
## 227 0.0004855894
## 228 0.0004858120
## 229 0.0004872522
## 230 0.0004869581
## 231 0.0004874034
## 232 0.0004878834
## 233 0.0004875616
## 234 0.0004873691
## 235 0.0004872939
## 236 0.0004875048
## 237 0.0004874131
## 238 0.0004874002
## 239 0.0004873677
## 240 0.0004875010
##    nvmax
## 12    12

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

##                  Estimate         2.5 %        97.5 %
## (Intercept)  1.9914336190  1.9842730715  1.9985941665
## x4          -0.0000526423 -0.0000700820 -0.0000352026
## x7           0.0111862837  0.0099543637  0.0124182036
## x8           0.0004690108  0.0001826029  0.0007554187
## x9           0.0038170729  0.0031744774  0.0044596685
## x10          0.0009995995  0.0004009321  0.0015982668
## x16          0.0009666310  0.0005522227  0.0013810393
## x17          0.0014834664  0.0008548562  0.0021120766
## stat14      -0.0009951099 -0.0014713783 -0.0005188415
## stat98       0.0034421836  0.0029701194  0.0039142477
## stat110     -0.0032377891 -0.0037177908 -0.0027577874
## stat149     -0.0008689431 -0.0013557374 -0.0003821488
## sqrt.x18     0.0267030082  0.0248675733  0.0285384431

Test

# Evaluate the CV-trained backward-elimination (leapBackward) model on the
# held-out test set; prints a summary of predictions and the test MSE.
# isTRUE() is NA-safe: if the flag is NA/NULL the chunk is skipped instead of
# erroring, unlike `== TRUE` inside if().
if (isTRUE(algo.backward.caret)) {
  test.model(model.backward, data.test,
             method = "leapBackward", subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE,
             transformation = t)  # NOTE(review): `t` is presumably a transform object set earlier — confirm it is not base::t
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.045   2.084   2.097   2.097   2.109   2.143 
## [1] "leapBackward  Test MSE: 0.000976485518511151"

Backward Elimination with CV (with filtered training set)

Train

# Re-train backward elimination (leapBackward) via caret CV on the filtered
# training set (data.train2). Seed is fixed so CV fold assignment is
# reproducible. isTRUE() is NA-safe, and `<-` replaces `=` for assignment
# per R convention.
if (isTRUE(algo.backward.caret)) {
  set.seed(1)
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train2,
                                    method = "leapBackward",
                                    feature.names = feature.names)
  model.backward <- returned$model  # caret-fitted model, used by test.model() below
  id <- returned$id                 # presumably selected-feature indices — TODO confirm against helper
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 34 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD  RsquaredSD
## 1       1 0.02889846 0.1473540 0.02339940 0.0007134151 0.009987013
## 2       2 0.02771803 0.2165405 0.02253170 0.0006944449 0.025945344
## 3       3 0.02710862 0.2507693 0.02197082 0.0005989025 0.028698566
## 4       4 0.02653713 0.2821401 0.02128824 0.0006615569 0.034159341
## 5       5 0.02608466 0.3059575 0.02097091 0.0006508910 0.030710048
## 6       6 0.02593409 0.3137841 0.02087253 0.0005915676 0.028883207
## 7       7 0.02592217 0.3143957 0.02090517 0.0006115201 0.031180139
## 8       8 0.02582133 0.3199456 0.02085804 0.0005953075 0.031445137
## 9       9 0.02576415 0.3228265 0.02082125 0.0005997096 0.032488117
## 10     10 0.02568801 0.3266456 0.02078380 0.0006045909 0.033584333
## 11     11 0.02566619 0.3278194 0.02077293 0.0005668776 0.031962892
## 12     12 0.02566777 0.3276937 0.02078077 0.0005525641 0.030649412
## 13     13 0.02569466 0.3263611 0.02080091 0.0005346902 0.032184814
## 14     14 0.02571543 0.3253304 0.02082811 0.0005288364 0.031641157
## 15     15 0.02572910 0.3246738 0.02084525 0.0005283189 0.031350665
## 16     16 0.02575335 0.3234460 0.02087032 0.0004995646 0.029548316
## 17     17 0.02577420 0.3224195 0.02089059 0.0004879945 0.029661615
## 18     18 0.02577583 0.3224024 0.02089136 0.0004899724 0.029970223
## 19     19 0.02579799 0.3213551 0.02092121 0.0004753014 0.030463059
## 20     20 0.02577810 0.3224452 0.02090958 0.0004522440 0.031054741
## 21     21 0.02574113 0.3244356 0.02088967 0.0004498528 0.032056159
## 22     22 0.02572635 0.3251851 0.02087045 0.0004337188 0.031605190
## 23     23 0.02572042 0.3255217 0.02087033 0.0004370053 0.030945562
## 24     24 0.02570598 0.3262007 0.02086021 0.0004358448 0.030925196
## 25     25 0.02569861 0.3266822 0.02085830 0.0004452349 0.030982232
## 26     26 0.02571745 0.3258365 0.02087593 0.0004296545 0.031825872
## 27     27 0.02571245 0.3260968 0.02088410 0.0004323255 0.032115920
## 28     28 0.02568711 0.3273919 0.02086490 0.0004195835 0.031298819
## 29     29 0.02567488 0.3279768 0.02084400 0.0004117809 0.030716174
## 30     30 0.02566202 0.3286642 0.02081498 0.0003812233 0.031082753
## 31     31 0.02564075 0.3298009 0.02080722 0.0003793686 0.031379875
## 32     32 0.02562261 0.3307052 0.02079429 0.0003942612 0.031102986
## 33     33 0.02562062 0.3308513 0.02079060 0.0004203615 0.030958620
## 34     34 0.02562000 0.3308530 0.02079713 0.0004175543 0.030424852
## 35     35 0.02562682 0.3305934 0.02080823 0.0004056446 0.029205105
## 36     36 0.02564432 0.3297498 0.02082335 0.0004197591 0.029159898
## 37     37 0.02562344 0.3307099 0.02080512 0.0004292207 0.028530335
## 38     38 0.02564034 0.3298884 0.02082036 0.0004384594 0.029402353
## 39     39 0.02564774 0.3295939 0.02082297 0.0004306587 0.029402868
## 40     40 0.02564499 0.3297931 0.02082323 0.0004394273 0.029471052
## 41     41 0.02566614 0.3287638 0.02084152 0.0004296779 0.029136202
## 42     42 0.02567517 0.3282899 0.02085085 0.0004302601 0.029264877
## 43     43 0.02568673 0.3276745 0.02085742 0.0004300523 0.029280566
## 44     44 0.02568630 0.3276993 0.02085843 0.0004391739 0.029516599
## 45     45 0.02570410 0.3268608 0.02086742 0.0004439791 0.029578379
## 46     46 0.02571917 0.3260744 0.02087433 0.0004292294 0.029315998
## 47     47 0.02572565 0.3257931 0.02088749 0.0004312525 0.029719026
## 48     48 0.02573772 0.3251750 0.02089391 0.0004325965 0.029697144
## 49     49 0.02573590 0.3253009 0.02090061 0.0004201462 0.029843320
## 50     50 0.02572933 0.3256548 0.02089336 0.0004180493 0.029987573
## 51     51 0.02574970 0.3246986 0.02091622 0.0004125811 0.030274232
## 52     52 0.02576765 0.3237674 0.02092930 0.0004076704 0.029744260
## 53     53 0.02576211 0.3240856 0.02092432 0.0004153130 0.030035759
## 54     54 0.02576328 0.3240529 0.02091771 0.0004195776 0.030308405
## 55     55 0.02575703 0.3243766 0.02090934 0.0004065520 0.029989996
## 56     56 0.02576309 0.3240610 0.02091765 0.0004083792 0.029340258
## 57     57 0.02576423 0.3239710 0.02091372 0.0004118390 0.029578657
## 58     58 0.02577094 0.3236303 0.02091350 0.0004109709 0.029032949
## 59     59 0.02574837 0.3247016 0.02089625 0.0004024183 0.029558136
## 60     60 0.02575306 0.3245387 0.02090540 0.0003919590 0.028802242
## 61     61 0.02574945 0.3247013 0.02090842 0.0003904933 0.028771923
## 62     62 0.02574586 0.3248712 0.02090655 0.0003898554 0.028627302
## 63     63 0.02574594 0.3248656 0.02091209 0.0003928285 0.028990433
## 64     64 0.02574842 0.3247747 0.02091640 0.0003898427 0.028911308
## 65     65 0.02575578 0.3244233 0.02092413 0.0003845622 0.028788699
## 66     66 0.02576537 0.3239800 0.02093673 0.0003877276 0.029170947
## 67     67 0.02577393 0.3236272 0.02093514 0.0003832447 0.028836270
## 68     68 0.02576713 0.3239626 0.02092843 0.0003980858 0.029505797
## 69     69 0.02578039 0.3233137 0.02094238 0.0003923654 0.029310459
## 70     70 0.02577803 0.3234579 0.02094287 0.0003971491 0.029588842
## 71     71 0.02578259 0.3232389 0.02094976 0.0003953169 0.029658672
## 72     72 0.02578463 0.3231842 0.02095242 0.0004001309 0.029414035
## 73     73 0.02577711 0.3235915 0.02094590 0.0004089370 0.029617373
## 74     74 0.02578907 0.3230654 0.02095859 0.0004147181 0.029711261
## 75     75 0.02578658 0.3232197 0.02096229 0.0004157153 0.029573477
## 76     76 0.02579314 0.3229061 0.02096260 0.0004299945 0.029236516
## 77     77 0.02580200 0.3224663 0.02096229 0.0004350028 0.028881583
## 78     78 0.02579739 0.3227095 0.02095940 0.0004246715 0.029077598
## 79     79 0.02579990 0.3226246 0.02095242 0.0004206633 0.029600850
## 80     80 0.02578988 0.3230908 0.02094458 0.0004189785 0.029528779
## 81     81 0.02578808 0.3231810 0.02093955 0.0004078340 0.029686590
## 82     82 0.02578513 0.3233622 0.02093435 0.0004159576 0.029503831
## 83     83 0.02578951 0.3231662 0.02093577 0.0004109062 0.029348391
## 84     84 0.02578656 0.3233425 0.02093207 0.0004166960 0.029175963
## 85     85 0.02576716 0.3243361 0.02091759 0.0004243869 0.029340982
## 86     86 0.02576015 0.3247082 0.02090980 0.0004277264 0.029309593
## 87     87 0.02576211 0.3246413 0.02091167 0.0004243903 0.029070311
## 88     88 0.02576446 0.3245761 0.02091084 0.0004195004 0.029011766
## 89     89 0.02576107 0.3247587 0.02090480 0.0004175664 0.029575731
## 90     90 0.02576566 0.3245331 0.02091023 0.0004248749 0.029820484
## 91     91 0.02576482 0.3245521 0.02090840 0.0004338353 0.030089450
## 92     92 0.02576678 0.3244313 0.02091071 0.0004350690 0.030577242
## 93     93 0.02575755 0.3248830 0.02090261 0.0004250815 0.030278454
## 94     94 0.02575567 0.3249768 0.02089542 0.0004289889 0.030367235
## 95     95 0.02575673 0.3249525 0.02089947 0.0004275340 0.030732900
## 96     96 0.02575637 0.3250275 0.02089558 0.0004282557 0.030413220
## 97     97 0.02576028 0.3248668 0.02089498 0.0004224426 0.030321750
## 98     98 0.02576513 0.3246693 0.02089837 0.0004167501 0.030172978
## 99     99 0.02577467 0.3242426 0.02090830 0.0004208935 0.029261201
## 100   100 0.02577946 0.3239983 0.02091239 0.0004103112 0.028773379
## 101   101 0.02576732 0.3245701 0.02089965 0.0004033589 0.029065027
## 102   102 0.02576255 0.3248556 0.02089835 0.0004088386 0.029116191
## 103   103 0.02576146 0.3249839 0.02089729 0.0004122079 0.029112138
## 104   104 0.02576653 0.3247279 0.02090018 0.0004133783 0.028988788
## 105   105 0.02576489 0.3248658 0.02089600 0.0004162262 0.028737387
## 106   106 0.02576909 0.3246588 0.02089514 0.0004179799 0.029170733
## 107   107 0.02577296 0.3244820 0.02089468 0.0004193050 0.029125107
## 108   108 0.02577387 0.3244361 0.02089080 0.0004214460 0.029408081
## 109   109 0.02578514 0.3239180 0.02089538 0.0004192622 0.029356840
## 110   110 0.02579216 0.3235625 0.02089977 0.0004174096 0.029546662
## 111   111 0.02579660 0.3233735 0.02090454 0.0004068491 0.029159067
## 112   112 0.02578556 0.3239245 0.02089348 0.0004019582 0.029429900
## 113   113 0.02578627 0.3239207 0.02089509 0.0004088836 0.029627584
## 114   114 0.02578967 0.3237580 0.02089813 0.0004052238 0.029945898
## 115   115 0.02578256 0.3241078 0.02089369 0.0004033838 0.029945413
## 116   116 0.02578351 0.3240781 0.02089614 0.0004108725 0.029972573
## 117   117 0.02578687 0.3239440 0.02089804 0.0004122374 0.030201904
## 118   118 0.02578733 0.3239479 0.02090054 0.0004177695 0.030430149
## 119   119 0.02577771 0.3243985 0.02089593 0.0004177903 0.030127450
## 120   120 0.02578426 0.3240817 0.02090204 0.0004211746 0.030013861
## 121   121 0.02579056 0.3237481 0.02090378 0.0004166081 0.029886051
## 122   122 0.02579117 0.3237324 0.02090606 0.0004220506 0.030045894
## 123   123 0.02578801 0.3238969 0.02090215 0.0004212131 0.030177101
## 124   124 0.02579265 0.3237102 0.02090831 0.0004203803 0.030011134
## 125   125 0.02579199 0.3237519 0.02090669 0.0004265283 0.030018761
## 126   126 0.02579032 0.3238371 0.02090969 0.0004272752 0.030312997
## 127   127 0.02579507 0.3236309 0.02091451 0.0004277347 0.030112870
## 128   128 0.02579673 0.3235728 0.02091911 0.0004280608 0.030067175
## 129   129 0.02580562 0.3231388 0.02092261 0.0004277798 0.030000432
## 130   130 0.02580524 0.3231365 0.02092636 0.0004250324 0.029660714
## 131   131 0.02580900 0.3229379 0.02092752 0.0004223614 0.029744837
## 132   132 0.02581336 0.3227438 0.02093647 0.0004176412 0.029393770
## 133   133 0.02581585 0.3226511 0.02093967 0.0004163015 0.029555096
## 134   134 0.02581661 0.3226236 0.02094215 0.0004129688 0.029373439
## 135   135 0.02580714 0.3230988 0.02093606 0.0004166460 0.029688248
## 136   136 0.02581190 0.3228645 0.02094195 0.0004167846 0.029782880
## 137   137 0.02581221 0.3228606 0.02094389 0.0004158436 0.029610377
## 138   138 0.02581477 0.3227336 0.02094874 0.0004223490 0.029417406
## 139   139 0.02581950 0.3225186 0.02094950 0.0004264815 0.029578401
## 140   140 0.02582877 0.3220841 0.02095464 0.0004286912 0.029650588
## 141   141 0.02582556 0.3222273 0.02095309 0.0004253253 0.029629057
## 142   142 0.02582474 0.3222672 0.02095264 0.0004268980 0.029697699
## 143   143 0.02582647 0.3221481 0.02095542 0.0004269894 0.029373706
## 144   144 0.02582219 0.3223622 0.02095259 0.0004241606 0.029494380
## 145   145 0.02582433 0.3222905 0.02095646 0.0004224966 0.029417939
## 146   146 0.02582757 0.3221648 0.02095783 0.0004277993 0.029480290
## 147   147 0.02582681 0.3221699 0.02095551 0.0004295496 0.029352346
## 148   148 0.02582420 0.3222653 0.02095251 0.0004264912 0.029159121
## 149   149 0.02582443 0.3222793 0.02095307 0.0004253072 0.028943886
## 150   150 0.02582951 0.3220465 0.02095985 0.0004242618 0.029250577
## 151   151 0.02583684 0.3217207 0.02096499 0.0004264185 0.029061528
## 152   152 0.02583838 0.3216423 0.02096353 0.0004273896 0.029215792
## 153   153 0.02583532 0.3217874 0.02096137 0.0004294295 0.029157537
## 154   154 0.02583816 0.3216508 0.02096101 0.0004316778 0.029331390
## 155   155 0.02583940 0.3215851 0.02096052 0.0004292865 0.029520951
## 156   156 0.02583897 0.3216125 0.02095778 0.0004263436 0.029467265
## 157   157 0.02583798 0.3216707 0.02095855 0.0004206348 0.029428301
## 158   158 0.02583577 0.3217852 0.02095485 0.0004215076 0.029470878
## 159   159 0.02583468 0.3218338 0.02095334 0.0004188736 0.029378204
## 160   160 0.02583702 0.3217186 0.02095773 0.0004168495 0.029211482
## 161   161 0.02583768 0.3216868 0.02095739 0.0004160403 0.029414105
## 162   162 0.02583392 0.3218835 0.02095665 0.0004142726 0.029171621
## 163   163 0.02583246 0.3219697 0.02095743 0.0004161646 0.029195605
## 164   164 0.02583256 0.3219877 0.02095559 0.0004227888 0.029426336
## 165   165 0.02583195 0.3220211 0.02095320 0.0004290197 0.029334806
## 166   166 0.02583440 0.3219248 0.02095680 0.0004333789 0.029616646
## 167   167 0.02583636 0.3218415 0.02095965 0.0004322829 0.029521657
## 168   168 0.02583768 0.3217738 0.02095847 0.0004361825 0.029428592
## 169   169 0.02583633 0.3218358 0.02095629 0.0004299017 0.029351919
## 170   170 0.02583972 0.3216656 0.02096095 0.0004329494 0.029491287
## 171   171 0.02584385 0.3214752 0.02096262 0.0004365440 0.029517566
## 172   172 0.02584763 0.3212975 0.02096593 0.0004359712 0.029534634
## 173   173 0.02584738 0.3213114 0.02096578 0.0004389054 0.029727318
## 174   174 0.02584467 0.3214428 0.02096071 0.0004380998 0.029692377
## 175   175 0.02584847 0.3212612 0.02096579 0.0004388351 0.029594787
## 176   176 0.02584799 0.3212822 0.02096319 0.0004405879 0.029862878
## 177   177 0.02585278 0.3210444 0.02096841 0.0004387030 0.029878070
## 178   178 0.02585301 0.3210599 0.02096810 0.0004406522 0.029972375
## 179   179 0.02585659 0.3209050 0.02097293 0.0004388365 0.030095991
## 180   180 0.02585613 0.3209446 0.02097292 0.0004413011 0.030269559
## 181   181 0.02585643 0.3209407 0.02097265 0.0004400576 0.030282633
## 182   182 0.02585361 0.3210781 0.02096849 0.0004396340 0.030334010
## 183   183 0.02585120 0.3211922 0.02096638 0.0004371481 0.030289058
## 184   184 0.02584892 0.3213057 0.02096397 0.0004399328 0.030477312
## 185   185 0.02585016 0.3212505 0.02096507 0.0004401309 0.030559002
## 186   186 0.02585312 0.3211147 0.02096710 0.0004381436 0.030600281
## 187   187 0.02585598 0.3209733 0.02096810 0.0004365929 0.030536237
## 188   188 0.02585894 0.3208266 0.02097056 0.0004392711 0.030542340
## 189   189 0.02586043 0.3207536 0.02097262 0.0004411037 0.030475675
## 190   190 0.02586084 0.3207358 0.02097389 0.0004425094 0.030504878
## 191   191 0.02586046 0.3207626 0.02097400 0.0004435396 0.030773741
## 192   192 0.02586133 0.3207148 0.02097611 0.0004432063 0.030771625
## 193   193 0.02586218 0.3206859 0.02097642 0.0004431232 0.030697009
## 194   194 0.02586477 0.3205655 0.02098041 0.0004437214 0.030754448
## 195   195 0.02586660 0.3204806 0.02098269 0.0004442444 0.030819602
## 196   196 0.02586531 0.3205394 0.02098233 0.0004434874 0.030794152
## 197   197 0.02586609 0.3204959 0.02098378 0.0004451750 0.030750793
## 198   198 0.02586815 0.3204053 0.02098505 0.0004457620 0.030778962
## 199   199 0.02586887 0.3203664 0.02098617 0.0004448458 0.030799671
## 200   200 0.02586998 0.3203097 0.02098707 0.0004458416 0.030859781
## 201   201 0.02587203 0.3202206 0.02098863 0.0004457233 0.030855196
## 202   202 0.02587151 0.3202469 0.02098818 0.0004433294 0.030695408
## 203   203 0.02587146 0.3202560 0.02098786 0.0004436211 0.030689134
## 204   204 0.02587299 0.3201825 0.02099019 0.0004437992 0.030715198
## 205   205 0.02587396 0.3201289 0.02099144 0.0004426524 0.030671292
## 206   206 0.02587204 0.3202142 0.02099043 0.0004425515 0.030717856
## 207   207 0.02587188 0.3202234 0.02099035 0.0004424949 0.030764425
## 208   208 0.02587266 0.3201919 0.02099158 0.0004442300 0.030807270
## 209   209 0.02587255 0.3201983 0.02099193 0.0004449624 0.030849318
## 210   210 0.02587182 0.3202336 0.02099095 0.0004441302 0.030853033
## 211   211 0.02587172 0.3202374 0.02099088 0.0004428857 0.030827583
## 212   212 0.02587074 0.3202781 0.02098995 0.0004431438 0.030888995
## 213   213 0.02587084 0.3202675 0.02099087 0.0004425933 0.030923854
## 214   214 0.02587115 0.3202524 0.02099099 0.0004415990 0.030891102
## 215   215 0.02587085 0.3202661 0.02099071 0.0004411895 0.030885704
## 216   216 0.02587108 0.3202537 0.02099052 0.0004409955 0.030882069
## 217   217 0.02587122 0.3202477 0.02099038 0.0004416564 0.030875243
## 218   218 0.02587216 0.3202028 0.02099119 0.0004415250 0.030888311
## 219   219 0.02587286 0.3201722 0.02099169 0.0004414740 0.030877639
## 220   220 0.02587241 0.3201906 0.02099084 0.0004407007 0.030860639
## 221   221 0.02587277 0.3201727 0.02099126 0.0004399664 0.030855239
## 222   222 0.02587296 0.3201639 0.02099159 0.0004398621 0.030831406
## 223   223 0.02587335 0.3201418 0.02099238 0.0004391525 0.030800926
## 224   224 0.02587356 0.3201301 0.02099234 0.0004384975 0.030807141
## 225   225 0.02587386 0.3201175 0.02099233 0.0004380978 0.030823935
## 226   226 0.02587387 0.3201163 0.02099235 0.0004387038 0.030815747
## 227   227 0.02587338 0.3201375 0.02099237 0.0004389619 0.030846027
## 228   228 0.02587336 0.3201349 0.02099205 0.0004389470 0.030840055
## 229   229 0.02587397 0.3201082 0.02099267 0.0004392338 0.030844485
## 230   230 0.02587352 0.3201269 0.02099247 0.0004391111 0.030839317
## 231   231 0.02587391 0.3201081 0.02099281 0.0004392433 0.030837420
## 232   232 0.02587420 0.3200940 0.02099298 0.0004392698 0.030842236
## 233   233 0.02587411 0.3200966 0.02099304 0.0004393985 0.030858542
## 234   234 0.02587423 0.3200912 0.02099286 0.0004397376 0.030856570
## 235   235 0.02587422 0.3200920 0.02099292 0.0004395790 0.030850723
## 236   236 0.02587439 0.3200851 0.02099309 0.0004394687 0.030836930
## 237   237 0.02587438 0.3200854 0.02099307 0.0004394771 0.030839524
## 238   238 0.02587455 0.3200776 0.02099327 0.0004395125 0.030831645
## 239   239 0.02587451 0.3200794 0.02099325 0.0004394825 0.030826420
## 240   240 0.02587453 0.3200784 0.02099330 0.0004394797 0.030826938
##            MAESD
## 1   0.0005549779
## 2   0.0005096954
## 3   0.0004777971
## 4   0.0004916343
## 5   0.0004714907
## 6   0.0004350187
## 7   0.0004591533
## 8   0.0004763347
## 9   0.0005008133
## 10  0.0005344009
## 11  0.0005243454
## 12  0.0005135503
## 13  0.0004836637
## 14  0.0004700333
## 15  0.0004807270
## 16  0.0004641492
## 17  0.0004721752
## 18  0.0004792862
## 19  0.0004690309
## 20  0.0004603767
## 21  0.0004596505
## 22  0.0004507001
## 23  0.0004505799
## 24  0.0004394724
## 25  0.0004461831
## 26  0.0004355554
## 27  0.0004493810
## 28  0.0004576561
## 29  0.0004651116
## 30  0.0004547715
## 31  0.0004395036
## 32  0.0004559246
## 33  0.0004820075
## 34  0.0004873762
## 35  0.0004874234
## 36  0.0004983242
## 37  0.0005077684
## 38  0.0005383381
## 39  0.0005388483
## 40  0.0005519300
## 41  0.0005462986
## 42  0.0005438943
## 43  0.0005478772
## 44  0.0005463073
## 45  0.0005397839
## 46  0.0005323582
## 47  0.0005372717
## 48  0.0005225452
## 49  0.0005167249
## 50  0.0005245173
## 51  0.0005153895
## 52  0.0005152972
## 53  0.0005103604
## 54  0.0005124948
## 55  0.0004931573
## 56  0.0004882430
## 57  0.0004849247
## 58  0.0004835424
## 59  0.0004690141
## 60  0.0004584435
## 61  0.0004522474
## 62  0.0004582852
## 63  0.0004551814
## 64  0.0004501877
## 65  0.0004521262
## 66  0.0004541514
## 67  0.0004501310
## 68  0.0004605754
## 69  0.0004596972
## 70  0.0004664404
## 71  0.0004724863
## 72  0.0004724951
## 73  0.0004706926
## 74  0.0004794035
## 75  0.0004818347
## 76  0.0004996429
## 77  0.0004957365
## 78  0.0004904012
## 79  0.0004875087
## 80  0.0004872079
## 81  0.0004729230
## 82  0.0004713056
## 83  0.0004687554
## 84  0.0004682121
## 85  0.0004811363
## 86  0.0004804907
## 87  0.0004792368
## 88  0.0004755347
## 89  0.0004767802
## 90  0.0004871606
## 91  0.0004875975
## 92  0.0004847059
## 93  0.0004739795
## 94  0.0004776276
## 95  0.0004778695
## 96  0.0004705930
## 97  0.0004634351
## 98  0.0004598006
## 99  0.0004611611
## 100 0.0004498085
## 101 0.0004463378
## 102 0.0004524569
## 103 0.0004533019
## 104 0.0004477570
## 105 0.0004499338
## 106 0.0004527283
## 107 0.0004552527
## 108 0.0004567452
## 109 0.0004525977
## 110 0.0004486072
## 111 0.0004407995
## 112 0.0004390816
## 113 0.0004433017
## 114 0.0004448669
## 115 0.0004452938
## 116 0.0004472312
## 117 0.0004460713
## 118 0.0004500515
## 119 0.0004508281
## 120 0.0004545072
## 121 0.0004509989
## 122 0.0004515085
## 123 0.0004482457
## 124 0.0004411449
## 125 0.0004484876
## 126 0.0004513726
## 127 0.0004514477
## 128 0.0004499824
## 129 0.0004518338
## 130 0.0004483738
## 131 0.0004492060
## 132 0.0004476888
## 133 0.0004478916
## 134 0.0004403478
## 135 0.0004492701
## 136 0.0004545811
## 137 0.0004529284
## 138 0.0004543502
## 139 0.0004573159
## 140 0.0004563008
## 141 0.0004589210
## 142 0.0004585259
## 143 0.0004590513
## 144 0.0004560844
## 145 0.0004565602
## 146 0.0004580652
## 147 0.0004597311
## 148 0.0004544928
## 149 0.0004545382
## 150 0.0004528096
## 151 0.0004522963
## 152 0.0004558840
## 153 0.0004578803
## 154 0.0004586133
## 155 0.0004582912
## 156 0.0004580704
## 157 0.0004561645
## 158 0.0004557821
## 159 0.0004566749
## 160 0.0004537016
## 161 0.0004560237
## 162 0.0004573809
## 163 0.0004595829
## 164 0.0004658899
## 165 0.0004699347
## 166 0.0004750005
## 167 0.0004734993
## 168 0.0004754106
## 169 0.0004703451
## 170 0.0004713896
## 171 0.0004730218
## 172 0.0004732840
## 173 0.0004762728
## 174 0.0004771537
## 175 0.0004769789
## 176 0.0004789481
## 177 0.0004802119
## 178 0.0004803007
## 179 0.0004798926
## 180 0.0004816165
## 181 0.0004828980
## 182 0.0004845291
## 183 0.0004819032
## 184 0.0004841910
## 185 0.0004853799
## 186 0.0004839804
## 187 0.0004833840
## 188 0.0004856585
## 189 0.0004887452
## 190 0.0004913864
## 191 0.0004938302
## 192 0.0004929983
## 193 0.0004912176
## 194 0.0004917636
## 195 0.0004927941
## 196 0.0004919815
## 197 0.0004924996
## 198 0.0004918704
## 199 0.0004907086
## 200 0.0004918495
## 201 0.0004910381
## 202 0.0004886705
## 203 0.0004895910
## 204 0.0004894733
## 205 0.0004898852
## 206 0.0004893167
## 207 0.0004900811
## 208 0.0004910765
## 209 0.0004911526
## 210 0.0004912331
## 211 0.0004902173
## 212 0.0004906138
## 213 0.0004912349
## 214 0.0004908640
## 215 0.0004904021
## 216 0.0004909617
## 217 0.0004908784
## 218 0.0004907172
## 219 0.0004907395
## 220 0.0004899024
## 221 0.0004896586
## 222 0.0004891352
## 223 0.0004884687
## 224 0.0004878878
## 225 0.0004876661
## 226 0.0004879251
## 227 0.0004884753
## 228 0.0004886225
## 229 0.0004888505
## 230 0.0004888986
## 231 0.0004891834
## 232 0.0004890514
## 233 0.0004893139
## 234 0.0004896283
## 235 0.0004894899
## 236 0.0004893882
## 237 0.0004895430
## 238 0.0004895299
## 239 0.0004895184
## 240 0.0004895355
##    nvmax
## 34    34

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

##                  Estimate         2.5 %        97.5 %
## (Intercept)  1.960194e+00  1.946886e+00  1.973502e+00
## x4          -5.950678e-05 -7.380272e-05 -4.521084e-05
## x7           1.205358e-02  1.104220e-02  1.306496e-02
## x8           5.519283e-04  3.170727e-04  7.867839e-04
## x9           3.681125e-03  3.156120e-03  4.206131e-03
## x10          1.324451e-03  8.341744e-04  1.814728e-03
## x11          2.061189e+05  8.909105e+04  3.231468e+05
## x16          1.013279e-03  6.740728e-04  1.352485e-03
## x17          1.497206e-03  9.820328e-04  2.012379e-03
## x21          1.380369e-04  7.070610e-05  2.053677e-04
## stat3        5.048793e-04  1.095246e-04  9.002339e-04
## stat4       -5.712539e-04 -9.668024e-04 -1.757054e-04
## stat13      -4.596296e-04 -8.509287e-04 -6.833046e-05
## stat14      -1.068486e-03 -1.458270e-03 -6.787028e-04
## stat23       5.364375e-04  1.441971e-04  9.286779e-04
## stat24      -6.086289e-04 -1.002043e-03 -2.152148e-04
## stat25      -5.493799e-04 -9.415458e-04 -1.572140e-04
## stat26      -6.336843e-04 -1.027143e-03 -2.402258e-04
## stat38       5.699507e-04  1.781871e-04  9.617143e-04
## stat41      -5.124150e-04 -9.012797e-04 -1.235503e-04
## stat86       5.343140e-04  1.416305e-04  9.269975e-04
## stat98       3.348459e-03  2.962186e-03  3.734732e-03
## stat99       5.387090e-04  1.420060e-04  9.354120e-04
## stat100      6.144649e-04  2.232181e-04  1.005712e-03
## stat110     -3.152648e-03 -3.545314e-03 -2.759983e-03
## stat128     -4.739148e-04 -8.655542e-04 -8.227544e-05
## stat144      5.184994e-04  1.272474e-04  9.097515e-04
## stat146     -7.072162e-04 -1.102285e-03 -3.121471e-04
## stat149     -6.266347e-04 -1.025849e-03 -2.274209e-04
## stat156      5.854740e-04  1.892452e-04  9.817027e-04
## stat172      5.372834e-04  1.448068e-04  9.297600e-04
## stat207      5.726452e-04  1.793794e-04  9.659110e-04
## stat210     -4.537163e-04 -8.486683e-04 -5.876430e-05
## stat217      5.427281e-04  1.485415e-04  9.369147e-04
## sqrt.x18     2.646783e-02  2.496955e-02  2.796611e-02

Test

# Evaluate the caret-trained backward-selection ("leapBackward") model on the
# held-out test set, drawing prediction limits and undoing the transformation.
if (algo.backward.caret) {
  test.model(
    model.backward, data.test,
    method = "leapBackward", subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    id = id,
    draw.limits = TRUE, transformation = t
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.038   2.080   2.094   2.093   2.106   2.153 
## [1] "leapBackward  Test MSE: 0.000993977876327613"

Stepwise Selection (w/ full train)

Train

# Fit a both-direction stepwise selection model on the full training data,
# print the fitted summary and elapsed time, then draw diagnostic plots.
if (algo.stepwise) {
  start.time <- Sys.time()

  # Search between the null model and the full model, suppressing the trace.
  model.stepwise <- step(model.null, scope = list(upper = model.full),
                         data = data.train, direction = "both", trace = 0)
  print(summary(model.stepwise))

  end.time <- Sys.time()
  print(paste0("Time taken for Stepwise Selection: ", end.time - start.time))

  plot.diagnostics(model.stepwise, data.train)
}

Test

# Evaluate the stepwise-selected model on the test set.
if (algo.stepwise) {
  test.model(model.stepwise, data.test, "Stepwise Selection")
}

Stepwise Selection (w/ filtered train)

Train

# Fit a both-direction stepwise selection model on the filtered training data,
# print the fitted summary and elapsed time, then draw diagnostic plots.
if (algo.stepwise) {
  start.time <- Sys.time()

  # Search between the filtered null model and the filtered full model.
  model.stepwise2 <- step(model.null2, scope = list(upper = model.full2),
                          data = data.train2, direction = "both", trace = 0)
  print(summary(model.stepwise2))

  end.time <- Sys.time()
  print(paste0("Time taken for Stepwise Selection: ", end.time - start.time))

  plot.diagnostics(model.stepwise2, data.train2)
}

Test

# Evaluate the filtered-train stepwise model on the (unfiltered) test set.
if (algo.stepwise) {
  test.model(model.stepwise2, data.test, "Stepwise Selection (2)")
}

Stepwise Selection with CV (w/ full train)

Train

# Train a stepwise ("leapSeq") selection model with caret cross-validation on
# the full training data; keep the fitted model and the run identifier.
if (algo.stepwise.caret) {
  set.seed(1)  # reproducible CV folds
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapSeq",
    feature.names = feature.names
  )
  model.stepwise <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 12 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.03426821 0.1094342 0.02668433 0.0007671751 0.01830599
## 2       2 0.03341813 0.1535728 0.02594003 0.0008198294 0.02639043
## 3       3 0.03287257 0.1816561 0.02541294 0.0008503685 0.03153037
## 4       4 0.03239628 0.2054364 0.02473878 0.0008624627 0.03152274
## 5       5 0.03201215 0.2239753 0.02445310 0.0008662346 0.02725898
## 6       6 0.03191524 0.2285619 0.02438627 0.0008360375 0.02470748
## 7       7 0.03191420 0.2285037 0.02442852 0.0007968190 0.02338661
## 8       8 0.03185561 0.2313736 0.02438606 0.0007411257 0.02370581
## 9       9 0.03183263 0.2324701 0.02435534 0.0007863111 0.02567275
## 10     10 0.03180391 0.2338324 0.02433061 0.0007389905 0.02451299
## 11     11 0.03180100 0.2340895 0.02433364 0.0007557523 0.02594161
## 12     12 0.03177341 0.2353893 0.02431505 0.0007382138 0.02432139
## 13     13 0.03177927 0.2351046 0.02431925 0.0007220642 0.02438588
## 14     14 0.03178458 0.2348190 0.02433793 0.0006950445 0.02268287
## 15     15 0.03180628 0.2337481 0.02435280 0.0006775356 0.02121300
## 16     16 0.03182834 0.2327028 0.02437532 0.0006658929 0.02087200
## 17     17 0.03183880 0.2322673 0.02438065 0.0006477234 0.02002256
## 18     18 0.03185728 0.2313949 0.02439923 0.0006370540 0.01992706
## 19     19 0.03185094 0.2317484 0.02441870 0.0006584165 0.02053793
## 20     20 0.03187153 0.2308219 0.02443099 0.0006445921 0.02017289
## 21     21 0.03187670 0.2305879 0.02444431 0.0006554409 0.01962816
## 22     22 0.03193157 0.2280450 0.02446618 0.0006636876 0.01866207
## 23     23 0.03220506 0.2144058 0.02463013 0.0013033233 0.05060556
## 24     24 0.03221999 0.2132814 0.02475530 0.0010461105 0.05069817
## 25     25 0.03194817 0.2273820 0.02448602 0.0006996696 0.02017723
## 26     26 0.03256014 0.1960767 0.02502629 0.0011829000 0.06449997
## 27     27 0.03193463 0.2280680 0.02448302 0.0006941963 0.02006089
## 28     28 0.03191732 0.2289893 0.02447815 0.0006746884 0.02050072
## 29     29 0.03216240 0.2164437 0.02466902 0.0012994022 0.05448396
## 30     30 0.03222780 0.2132456 0.02471707 0.0010654391 0.05076201
## 31     31 0.03222427 0.2121543 0.02475728 0.0008563445 0.05975810
## 32     32 0.03218040 0.2156066 0.02469504 0.0012720289 0.05399015
## 33     33 0.03227820 0.2110054 0.02481322 0.0010994792 0.04946067
## 34     34 0.03225632 0.2117776 0.02475065 0.0009484983 0.05073391
## 35     35 0.03222257 0.2134006 0.02476978 0.0010543204 0.05320143
## 36     36 0.03193449 0.2283235 0.02449761 0.0006101159 0.02005000
## 37     37 0.03230027 0.2099408 0.02477547 0.0010328723 0.05066152
## 38     38 0.03252820 0.1986375 0.02499977 0.0015665520 0.06626574
## 39     39 0.03195353 0.2275365 0.02449400 0.0006035882 0.01981162
## 40     40 0.03223458 0.2117566 0.02473039 0.0008527042 0.05930968
## 41     41 0.03197916 0.2264087 0.02451799 0.0006078462 0.01905333
## 42     42 0.03253600 0.1987475 0.02493131 0.0016889621 0.06535552
## 43     43 0.03199025 0.2259398 0.02451315 0.0006053608 0.01949890
## 44     44 0.03200005 0.2255093 0.02451950 0.0006001799 0.01913796
## 45     45 0.03272455 0.1896639 0.02516538 0.0016624597 0.06934252
## 46     46 0.03265929 0.1920796 0.02502883 0.0010927712 0.06000322
## 47     47 0.03226625 0.2112345 0.02477552 0.0010003314 0.05315819
## 48     48 0.03234574 0.2082569 0.02479026 0.0010028067 0.04798308
## 49     49 0.03202586 0.2244628 0.02455840 0.0006317996 0.01813387
## 50     50 0.03205530 0.2232212 0.02457450 0.0006180965 0.01777112
## 51     51 0.03232429 0.2079965 0.02479260 0.0008262397 0.05798014
## 52     52 0.03240947 0.2055135 0.02484934 0.0010177544 0.04863635
## 53     53 0.03235134 0.2086383 0.02478527 0.0011900161 0.04573304
## 54     54 0.03235830 0.2072473 0.02484547 0.0009935450 0.05245274
## 55     55 0.03240600 0.2055609 0.02483901 0.0008905600 0.04651958
## 56     56 0.03236183 0.2064195 0.02482392 0.0008409282 0.05786674
## 57     57 0.03209801 0.2214532 0.02459895 0.0006224946 0.01823085
## 58     58 0.03260089 0.1953469 0.02504623 0.0014178806 0.06673803
## 59     59 0.03273541 0.1898055 0.02515135 0.0015697615 0.06224186
## 60     60 0.03240713 0.2061503 0.02482002 0.0012123447 0.04765822
## 61     61 0.03213500 0.2198927 0.02462556 0.0006162607 0.01843040
## 62     62 0.03244667 0.2046734 0.02486138 0.0013560542 0.04808782
## 63     63 0.03215701 0.2189629 0.02464433 0.0006139369 0.01803046
## 64     64 0.03215766 0.2189273 0.02464591 0.0006218411 0.01795563
## 65     65 0.03307610 0.1717833 0.02537480 0.0017589965 0.07906444
## 66     66 0.03240260 0.2064764 0.02485738 0.0011682959 0.04782406
## 67     67 0.03245184 0.2038469 0.02491774 0.0010584210 0.05217028
## 68     68 0.03243281 0.2042896 0.02490934 0.0009875502 0.05180930
## 69     69 0.03247683 0.2028551 0.02489857 0.0008430323 0.04426416
## 70     70 0.03277068 0.1885813 0.02517928 0.0016687932 0.06664156
## 71     71 0.03242590 0.2046693 0.02492203 0.0009815337 0.05134820
## 72     72 0.03307242 0.1735043 0.02535660 0.0017025747 0.06812361
## 73     73 0.03274999 0.1893266 0.02519027 0.0014187007 0.06241708
## 74     74 0.03247351 0.2034504 0.02488778 0.0012482703 0.04890970
## 75     75 0.03255479 0.1998676 0.02499959 0.0014105465 0.05352266
## 76     76 0.03245121 0.2036750 0.02493114 0.0009765381 0.05099617
## 77     77 0.03251195 0.2014532 0.02492597 0.0008409004 0.04393046
## 78     78 0.03246826 0.2034640 0.02494477 0.0010251321 0.05004582
## 79     79 0.03249800 0.2025270 0.02491435 0.0012367453 0.04881553
## 80     80 0.03332210 0.1601645 0.02564917 0.0017623747 0.07674888
## 81     81 0.03285264 0.1851102 0.02519512 0.0014697358 0.05960151
## 82     82 0.03281124 0.1864979 0.02513905 0.0012786326 0.05950950
## 83     83 0.03222722 0.2163614 0.02471890 0.0006420039 0.01771417
## 84     84 0.03250326 0.2008511 0.02492887 0.0008268950 0.05720863
## 85     85 0.03224487 0.2155728 0.02473057 0.0006327842 0.01722731
## 86     86 0.03224361 0.2156762 0.02472696 0.0006266048 0.01735536
## 87     87 0.03255015 0.1998370 0.02494935 0.0008483515 0.04524631
## 88     88 0.03281542 0.1860926 0.02520560 0.0012095323 0.06325593
## 89     89 0.03353451 0.1466315 0.02580151 0.0012394943 0.08315037
## 90     90 0.03224447 0.2156953 0.02472968 0.0006328250 0.01654155
## 91     91 0.03249997 0.2022330 0.02496955 0.0010074989 0.04896895
## 92     92 0.03246941 0.2041366 0.02491754 0.0011419323 0.04578289
## 93     93 0.03323487 0.1667124 0.02556290 0.0019086493 0.07365307
## 94     94 0.03309007 0.1728153 0.02545947 0.0017102434 0.07352081
## 95     95 0.03224991 0.2155542 0.02474239 0.0006460601 0.01655176
## 96     96 0.03305176 0.1741000 0.02542892 0.0016823663 0.07381768
## 97     97 0.03286066 0.1844755 0.02526903 0.0013564238 0.06643515
## 98     98 0.03352446 0.1507706 0.02576477 0.0017397857 0.08048035
## 99     99 0.03251339 0.2021156 0.02492477 0.0012340166 0.04823288
## 100   100 0.03225942 0.2152093 0.02474496 0.0006399410 0.01586513
## 101   101 0.03226446 0.2150404 0.02474343 0.0006421817 0.01618947
## 102   102 0.03225513 0.2154387 0.02473768 0.0006438099 0.01607143
## 103   103 0.03283967 0.1844047 0.02523667 0.0011134785 0.07214589
## 104   104 0.03250030 0.2029810 0.02492944 0.0011314900 0.04511233
## 105   105 0.03315666 0.1699633 0.02541003 0.0016645704 0.06851504
## 106   106 0.03227054 0.2148840 0.02475374 0.0006373315 0.01642928
## 107   107 0.03322201 0.1670545 0.02552160 0.0017646852 0.07373626
## 108   108 0.03282415 0.1854954 0.02517729 0.0012983821 0.07088538
## 109   109 0.03249735 0.2031876 0.02492625 0.0011530699 0.04641637
## 110   110 0.03256075 0.1990601 0.02498004 0.0008640481 0.05852104
## 111   111 0.03311996 0.1719134 0.02543200 0.0017889293 0.07353009
## 112   112 0.03226611 0.2151342 0.02474516 0.0006554389 0.01703271
## 113   113 0.03288074 0.1839876 0.02522228 0.0014309889 0.06371707
## 114   114 0.03227543 0.2147163 0.02475614 0.0006645228 0.01701140
## 115   115 0.03337801 0.1574352 0.02569651 0.0015230063 0.07922087
## 116   116 0.03263647 0.1966923 0.02505396 0.0010797002 0.05020372
## 117   117 0.03321571 0.1676567 0.02556187 0.0017727993 0.07598871
## 118   118 0.03259807 0.1981381 0.02498979 0.0009173959 0.04759434
## 119   119 0.03289656 0.1838491 0.02527353 0.0017114333 0.06670664
## 120   120 0.03336341 0.1612385 0.02571451 0.0017135007 0.06447402
## 121   121 0.03253584 0.2014309 0.02496462 0.0007730190 0.03799863
## 122   122 0.03227800 0.2147287 0.02476737 0.0006572017 0.01777742
## 123   123 0.03246212 0.2049111 0.02494460 0.0008406476 0.03741542
## 124   124 0.03255327 0.2011190 0.02501569 0.0010258472 0.04250819
## 125   125 0.03287780 0.1847978 0.02524487 0.0009962464 0.05213368
## 126   126 0.03229155 0.2141544 0.02477409 0.0006787235 0.01807010
## 127   127 0.03289373 0.1834947 0.02526243 0.0010029631 0.06220867
## 128   128 0.03253887 0.2017402 0.02495471 0.0011777628 0.04403461
## 129   129 0.03257536 0.2001865 0.02502645 0.0010343068 0.04267564
## 130   130 0.03248123 0.2035632 0.02493285 0.0008866290 0.04240732
## 131   131 0.03248635 0.2033814 0.02493807 0.0008841757 0.04233113
## 132   132 0.03231780 0.2130411 0.02478386 0.0006698649 0.01775513
## 133   133 0.03231970 0.2129706 0.02477967 0.0006721112 0.01759299
## 134   134 0.03262293 0.1967328 0.02506204 0.0010415767 0.03996750
## 135   135 0.03245152 0.2057532 0.02489534 0.0007552661 0.03014732
## 136   136 0.03232359 0.2128370 0.02478983 0.0006728767 0.01785368
## 137   137 0.03271834 0.1934310 0.02510180 0.0012998519 0.04709065
## 138   138 0.03232367 0.2128562 0.02478298 0.0006703701 0.01759968
## 139   139 0.03232694 0.2127434 0.02478701 0.0006720339 0.01792030
## 140   140 0.03232707 0.2127059 0.02478610 0.0006655669 0.01756071
## 141   141 0.03267727 0.1949452 0.02506157 0.0011478019 0.04750880
## 142   142 0.03275618 0.1915855 0.02514514 0.0013177016 0.04663473
## 143   143 0.03233641 0.2123508 0.02479514 0.0006641674 0.01794348
## 144   144 0.03255299 0.2012246 0.02495176 0.0006947927 0.03230338
## 145   145 0.03252694 0.2029856 0.02495705 0.0009678485 0.03044379
## 146   146 0.03291851 0.1832781 0.02529230 0.0011465747 0.04580054
## 147   147 0.03234209 0.2121625 0.02480180 0.0006554413 0.01795273
## 148   148 0.03257597 0.2006201 0.02499523 0.0011502750 0.04253818
## 149   149 0.03245586 0.2053257 0.02492220 0.0007632002 0.03373169
## 150   150 0.03235257 0.2116994 0.02481040 0.0006531247 0.01783619
## 151   151 0.03234854 0.2118553 0.02480872 0.0006522307 0.01797837
## 152   152 0.03254654 0.2021788 0.02497499 0.0009551675 0.03003312
## 153   153 0.03256824 0.2006353 0.02496384 0.0006777488 0.03204132
## 154   154 0.03282582 0.1884996 0.02522118 0.0011764602 0.04317004
## 155   155 0.03291638 0.1831507 0.02531307 0.0011724504 0.04867042
## 156   156 0.03261729 0.1986101 0.02503346 0.0008248250 0.03550950
## 157   157 0.03236270 0.2113320 0.02481604 0.0006426933 0.01720324
## 158   158 0.03235893 0.2114765 0.02481023 0.0006407215 0.01748444
## 159   159 0.03255059 0.2020322 0.02496873 0.0009519975 0.02977705
## 160   160 0.03261064 0.1993492 0.02503037 0.0009854615 0.03956329
## 161   161 0.03250948 0.2033679 0.02493707 0.0007622022 0.03252286
## 162   162 0.03235789 0.2115286 0.02481000 0.0006416878 0.01717629
## 163   163 0.03285493 0.1848021 0.02521460 0.0007880995 0.04647767
## 164   164 0.03236462 0.2112337 0.02481406 0.0006403334 0.01714144
## 165   165 0.03303164 0.1766927 0.02534512 0.0006809360 0.05244647
## 166   166 0.03261160 0.1988786 0.02502907 0.0008143821 0.03517467
## 167   167 0.03256556 0.2014084 0.02498222 0.0009643623 0.03065642
## 168   168 0.03278020 0.1903348 0.02513896 0.0009329435 0.03753074
## 169   169 0.03258838 0.1997676 0.02497725 0.0006656781 0.03193299
## 170   170 0.03236967 0.2110375 0.02482210 0.0006302471 0.01702591
## 171   171 0.03236970 0.2110330 0.02481788 0.0006341222 0.01706489
## 172   172 0.03275249 0.1912085 0.02515410 0.0008404370 0.04068444
## 173   173 0.03256166 0.2016074 0.02498394 0.0009464856 0.02973177
## 174   174 0.03237270 0.2109285 0.02481850 0.0006331708 0.01714731
## 175   175 0.03276373 0.1912264 0.02515174 0.0010476840 0.04810142
## 176   176 0.03237718 0.2107274 0.02481898 0.0006365413 0.01731163
## 177   177 0.03279752 0.1904752 0.02515899 0.0013138409 0.04773942
## 178   178 0.03279702 0.1905622 0.02516604 0.0014051559 0.04967714
## 179   179 0.03237025 0.2110247 0.02481318 0.0006467741 0.01760034
## 180   180 0.03257590 0.2001868 0.02498861 0.0006939617 0.04337144
## 181   181 0.03237282 0.2109112 0.02481404 0.0006455196 0.01732268
## 182   182 0.03237287 0.2109227 0.02481668 0.0006482776 0.01744460
## 183   183 0.03263147 0.1984964 0.02502642 0.0012338351 0.04576674
## 184   184 0.03236866 0.2111001 0.02481311 0.0006471514 0.01738600
## 185   185 0.03237032 0.2110391 0.02481405 0.0006458812 0.01743651
## 186   186 0.03237142 0.2110136 0.02481467 0.0006467892 0.01755864
## 187   187 0.03259449 0.2004801 0.02497356 0.0011511676 0.04359986
## 188   188 0.03251454 0.2033101 0.02494029 0.0008161184 0.03681825
## 189   189 0.03280987 0.1894130 0.02512871 0.0011220120 0.04844802
## 190   190 0.03236519 0.2112801 0.02480750 0.0006437626 0.01738841
## 191   191 0.03262597 0.1989198 0.02502960 0.0009859511 0.03932929
## 192   192 0.03258788 0.1998827 0.02496976 0.0006762709 0.03210609
## 193   193 0.03261051 0.1990516 0.02503950 0.0008135698 0.03478990
## 194   194 0.03236981 0.2110987 0.02481343 0.0006374238 0.01726627
## 195   195 0.03237075 0.2110552 0.02481483 0.0006378702 0.01726517
## 196   196 0.03237043 0.2110724 0.02481296 0.0006365789 0.01717547
## 197   197 0.03237101 0.2110479 0.02481387 0.0006383729 0.01712957
## 198   198 0.03290154 0.1858341 0.02525070 0.0013907798 0.05474296
## 199   199 0.03273981 0.1917438 0.02514317 0.0008393336 0.05344568
## 200   200 0.03237361 0.2109294 0.02481468 0.0006374305 0.01700977
## 201   201 0.03237630 0.2108123 0.02481599 0.0006373074 0.01696464
## 202   202 0.03256422 0.2011803 0.02497784 0.0008117547 0.03576381
## 203   203 0.03254167 0.2021817 0.02496567 0.0008344338 0.03815209
## 204   204 0.03260795 0.1999880 0.02498246 0.0011572975 0.04368603
## 205   205 0.03238100 0.2106208 0.02481862 0.0006333379 0.01688164
## 206   206 0.03237927 0.2106862 0.02481794 0.0006305485 0.01680520
## 207   207 0.03238022 0.2106458 0.02481808 0.0006304548 0.01682094
## 208   208 0.03238176 0.2105763 0.02481748 0.0006294251 0.01673578
## 209   209 0.03238406 0.2104716 0.02481876 0.0006289370 0.01669033
## 210   210 0.03238507 0.2104341 0.02482019 0.0006281745 0.01672407
## 211   211 0.03260292 0.1992542 0.02498208 0.0006705299 0.03203779
## 212   212 0.03260507 0.1999155 0.02502950 0.0011179061 0.03491546
## 213   213 0.03259913 0.1994353 0.02498151 0.0006662544 0.03173832
## 214   214 0.03238646 0.2103951 0.02482216 0.0006281572 0.01692305
## 215   215 0.03258736 0.1998826 0.02499996 0.0006857316 0.04299553
## 216   216 0.03260066 0.1993694 0.02498257 0.0006698993 0.03201019
## 217   217 0.03265393 0.1978422 0.02505535 0.0010052059 0.04029595
## 218   218 0.03260739 0.1998314 0.02503202 0.0011268773 0.03534320
## 219   219 0.03238395 0.2104972 0.02482028 0.0006266559 0.01690721
## 220   220 0.03301848 0.1799361 0.02534650 0.0014354265 0.05763696
## 221   221 0.03258732 0.1999141 0.02500431 0.0006893773 0.04313653
## 222   222 0.03263655 0.1980999 0.02504364 0.0008260545 0.03542484
## 223   223 0.03291817 0.1853855 0.02526445 0.0014036112 0.05485386
## 224   224 0.03287771 0.1863479 0.02520972 0.0012154145 0.05045663
## 225   225 0.03238457 0.2104718 0.02482250 0.0006297176 0.01699121
## 226   226 0.03238509 0.2104504 0.02482281 0.0006300274 0.01699901
## 227   227 0.03256622 0.2011911 0.02497142 0.0007996637 0.03515125
## 228   228 0.03283723 0.1885306 0.02515512 0.0011512041 0.04923950
## 229   229 0.03259182 0.1998200 0.02500498 0.0007019998 0.04352923
## 230   230 0.03238380 0.2105102 0.02482165 0.0006315468 0.01710672
## 231   231 0.03260030 0.1994223 0.02498624 0.0006738400 0.03209027
## 232   232 0.03256444 0.2013709 0.02498702 0.0008728946 0.04016208
## 233   233 0.03264232 0.1983754 0.02503479 0.0009823174 0.03908696
## 234   234 0.03265364 0.1978339 0.02503588 0.0012398985 0.04547855
## 235   235 0.03265145 0.1975447 0.02504994 0.0008580799 0.03690727
## 236   236 0.03256249 0.2014136 0.02497000 0.0007919211 0.03453009
## 237   237 0.03282383 0.1891952 0.02516466 0.0011680339 0.05711562
## 238   238 0.03282332 0.1891998 0.02516451 0.0011666505 0.05711300
## 239   239 0.03363529 0.1502536 0.02586293 0.0013392550 0.05251766
## 240   240 0.03238358 0.2105244 0.02482162 0.0006304950 0.01712347
##            MAESD
## 1   0.0005035442
## 2   0.0005078511
## 3   0.0006889891
## 4   0.0007548738
## 5   0.0007385957
## 6   0.0006920028
## 7   0.0006664314
## 8   0.0006186643
## 9   0.0006469633
## 10  0.0005912182
## 11  0.0006289068
## 12  0.0006182943
## 13  0.0006177591
## 14  0.0006042341
## 15  0.0005919262
## 16  0.0005803183
## 17  0.0005660014
## 18  0.0005649196
## 19  0.0005795214
## 20  0.0005747767
## 21  0.0005641470
## 22  0.0005832425
## 23  0.0010340177
## 24  0.0009358928
## 25  0.0005996618
## 26  0.0009844629
## 27  0.0005785205
## 28  0.0005812653
## 29  0.0010051291
## 30  0.0009443123
## 31  0.0009492483
## 32  0.0009747269
## 33  0.0009046412
## 34  0.0007471467
## 35  0.0009412746
## 36  0.0005183087
## 37  0.0009122228
## 38  0.0012986213
## 39  0.0005337162
## 40  0.0009433163
## 41  0.0005080690
## 42  0.0013102876
## 43  0.0005083388
## 44  0.0004999990
## 45  0.0012140322
## 46  0.0009659933
## 47  0.0008280439
## 48  0.0009065349
## 49  0.0005240828
## 50  0.0005115011
## 51  0.0008965505
## 52  0.0009068986
## 53  0.0009457418
## 54  0.0007984076
## 55  0.0007181984
## 56  0.0008943249
## 57  0.0004943965
## 58  0.0011095334
## 59  0.0012797107
## 60  0.0009598417
## 61  0.0004954057
## 62  0.0010728858
## 63  0.0004954288
## 64  0.0004859797
## 65  0.0014094398
## 66  0.0008519454
## 67  0.0008841151
## 68  0.0007947055
## 69  0.0006588021
## 70  0.0011467387
## 71  0.0007800067
## 72  0.0014021576
## 73  0.0010530615
## 74  0.0009932829
## 75  0.0009934523
## 76  0.0007914826
## 77  0.0006762621
## 78  0.0008811678
## 79  0.0009891580
## 80  0.0013155809
## 81  0.0012111003
## 82  0.0010390850
## 83  0.0005093038
## 84  0.0008476455
## 85  0.0004981802
## 86  0.0004894414
## 87  0.0006636002
## 88  0.0010495906
## 89  0.0010713817
## 90  0.0004870733
## 91  0.0008505914
## 92  0.0008049808
## 93  0.0014003403
## 94  0.0011907975
## 95  0.0005095626
## 96  0.0011407820
## 97  0.0010971769
## 98  0.0014533641
## 99  0.0010126988
## 100 0.0005166520
## 101 0.0005246808
## 102 0.0005265218
## 103 0.0011119620
## 104 0.0008031220
## 105 0.0014017309
## 106 0.0005215023
## 107 0.0013596249
## 108 0.0012358137
## 109 0.0008225237
## 110 0.0009215197
## 111 0.0015043263
## 112 0.0005332576
## 113 0.0012179273
## 114 0.0005367277
## 115 0.0011801886
## 116 0.0009279693
## 117 0.0014472858
## 118 0.0007474003
## 119 0.0012827930
## 120 0.0013430026
## 121 0.0006813428
## 122 0.0005235633
## 123 0.0006996131
## 124 0.0008158248
## 125 0.0009133824
## 126 0.0005232983
## 127 0.0010559943
## 128 0.0009639436
## 129 0.0008189950
## 130 0.0006993222
## 131 0.0006952711
## 132 0.0005222765
## 133 0.0005228603
## 134 0.0006893547
## 135 0.0005834655
## 136 0.0005217214
## 137 0.0007889632
## 138 0.0005089392
## 139 0.0005104563
## 140 0.0005066105
## 141 0.0007508732
## 142 0.0010085524
## 143 0.0005121960
## 144 0.0005853861
## 145 0.0006223127
## 146 0.0007463100
## 147 0.0005171031
## 148 0.0009974300
## 149 0.0006104594
## 150 0.0005110756
## 151 0.0005136416
## 152 0.0006086608
## 153 0.0005691697
## 154 0.0010793954
## 155 0.0007673702
## 156 0.0007248135
## 157 0.0004868085
## 158 0.0004901983
## 159 0.0005987174
## 160 0.0007515906
## 161 0.0005791165
## 162 0.0004914316
## 163 0.0006271020
## 164 0.0004965238
## 165 0.0008641614
## 166 0.0007206364
## 167 0.0006072662
## 168 0.0006240470
## 169 0.0005573259
## 170 0.0004778176
## 171 0.0004802517
## 172 0.0007279543
## 173 0.0006078100
## 174 0.0004767257
## 175 0.0007637106
## 176 0.0004828179
## 177 0.0008020183
## 178 0.0011009414
## 179 0.0004928913
## 180 0.0007899269
## 181 0.0004938203
## 182 0.0004959729
## 183 0.0010430982
## 184 0.0004968587
## 185 0.0004966243
## 186 0.0004969049
## 187 0.0007418999
## 188 0.0006129879
## 189 0.0007564072
## 190 0.0004921108
## 191 0.0007371655
## 192 0.0005765537
## 193 0.0007511005
## 194 0.0004914580
## 195 0.0004925137
## 196 0.0004904654
## 197 0.0004933488
## 198 0.0011259847
## 199 0.0008796416
## 200 0.0004897564
## 201 0.0004897834
## 202 0.0006471899
## 203 0.0006363730
## 204 0.0007451588
## 205 0.0004875551
## 206 0.0004856810
## 207 0.0004857216
## 208 0.0004857038
## 209 0.0004849272
## 210 0.0004836239
## 211 0.0005745750
## 212 0.0010258443
## 213 0.0005741007
## 214 0.0004853990
## 215 0.0008094643
## 216 0.0005758877
## 217 0.0007722195
## 218 0.0010337192
## 219 0.0004843070
## 220 0.0008770659
## 221 0.0008208924
## 222 0.0007383637
## 223 0.0011447008
## 224 0.0010618558
## 225 0.0004852937
## 226 0.0004857069
## 227 0.0006188524
## 228 0.0007773215
## 229 0.0008225170
## 230 0.0004869581
## 231 0.0005852287
## 232 0.0006750790
## 233 0.0007187363
## 234 0.0010357946
## 235 0.0007514823
## 236 0.0006160643
## 237 0.0009517826
## 238 0.0009513698
## 239 0.0010481399
## 240 0.0004875010
##    nvmax
## 12    12

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

##                  Estimate         2.5 %        97.5 %
## (Intercept)  1.9914336190  1.9842730715  1.9985941665
## x4          -0.0000526423 -0.0000700820 -0.0000352026
## x7           0.0111862837  0.0099543637  0.0124182036
## x8           0.0004690108  0.0001826029  0.0007554187
## x9           0.0038170729  0.0031744774  0.0044596685
## x10          0.0009995995  0.0004009321  0.0015982668
## x16          0.0009666310  0.0005522227  0.0013810393
## x17          0.0014834664  0.0008548562  0.0021120766
## stat14      -0.0009951099 -0.0014713783 -0.0005188415
## stat98       0.0034421836  0.0029701194  0.0039142477
## stat110     -0.0032377891 -0.0037177908 -0.0027577874
## stat149     -0.0008689431 -0.0013557374 -0.0003821488
## sqrt.x18     0.0267030082  0.0248675733  0.0285384431

Test

# Evaluate the caret-trained stepwise ("leapSeq") model on the held-out test
# set, drawing prediction limits and undoing the transformation.
if (algo.stepwise.caret) {
  test.model(
    model.stepwise, data.test,
    method = "leapSeq", subopt = NULL,
    formula = formula, feature.names = feature.names, label.names = label.names,
    id = id,
    draw.limits = TRUE, transformation = t
  )
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.045   2.084   2.097   2.097   2.109   2.143 
## [1] "leapSeq  Test MSE: 0.000976485518511151"

Stepwise Selection with CV (w/ filtered train)

Train

Test

LASSO (w/ full train)

Train

# LASSO on the full training set: fit a glmnet path over a wide penalty grid,
# choose lambda by cross-validation, and print the selected coefficients.
if (algo.LASSO) {
  # glmnet wants a numeric predictor matrix plus a response vector.
  # model.matrix would also work -- it builds a design matrix, expanding
  # factors to dummy variables (per the contrasts) and interactions likewise.
  x <- as.matrix(data.train[, feature.names])
  y <- data.train[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Penalty grid spanning 10^10 down to 10^-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # alpha = 1 selects the LASSO penalty; lambda.min is the CV-optimal value
  # (it could equally be chosen visually from the CV curve).
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

# Evaluate the full-train LASSO model (at the CV-chosen penalty) on the test
# set: report the test MSE and plot predicted vs. observed values.
if (algo.LASSO) {
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Mean squared prediction error on the held-out data.
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # BUG FIX: the quantity is an MSE (no square root is taken), so label it
  # "Test MSE" -- consistent with the other algorithms' test output.
  print(paste0("LASSO Test MSE: ", testMSE_LASSO))

  plot(ytest, lasso.pred)
}

LASSO (w/ filtered train)

Train

# LASSO on the filtered training set: fit a glmnet path over a wide penalty
# grid, choose lambda by cross-validation, and print the selected coefficients.
if (algo.LASSO) {
  # glmnet wants a numeric predictor matrix plus a response vector.
  # model.matrix would also work -- it builds a design matrix, expanding
  # factors to dummy variables (per the contrasts) and interactions likewise.
  x <- as.matrix(data.train2[, feature.names])
  y <- data.train2[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Penalty grid spanning 10^10 down to 10^-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # alpha = 1 selects the LASSO penalty; lambda.min is the CV-optimal value
  # (it could equally be chosen visually from the CV curve).
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

# Evaluate the filtered-train LASSO model (at the CV-chosen penalty) on the
# test set: report the test MSE and plot predicted vs. observed values.
if (algo.LASSO) {
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Mean squared prediction error on the held-out data.
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # BUG FIX: the quantity is an MSE (no square root is taken), so label it
  # "Test MSE" -- consistent with the other algorithms' test output.
  print(paste0("LASSO Test MSE: ", testMSE_LASSO))

  plot(ytest, lasso.pred)
}

LASSO with CV (w/ full train)

Train

if (algo.LASSO.caret == TRUE){
  set.seed(1)
  returned = train.caret.glmselect(formula = formula
                                   ,data =  data.train
                                   ,method = "glmnet"
                                   ,subopt = 'LASSO'
                                   ,feature.names = feature.names)
  model.LASSO.caret = returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.01 on full training set
## glmnet 
## 
## 5584 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5026, 5026, 5026, 5025, 5025, 5026, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE        Rsquared    MAE       
##   0.01000000  0.03569714  0.10943424  0.02766807
##   0.01047616  0.03583339  0.10943424  0.02776539
##   0.01097499  0.03598234  0.10943424  0.02787192
##   0.01149757  0.03614511  0.10943424  0.02798781
##   0.01204504  0.03629197  0.08670772  0.02809446
##   0.01261857  0.03629745         NaN  0.02809877
##   0.01321941  0.03629745         NaN  0.02809877
##   0.01384886  0.03629745         NaN  0.02809877
##   0.01450829  0.03629745         NaN  0.02809877
##   0.01519911  0.03629745         NaN  0.02809877
##   0.01592283  0.03629745         NaN  0.02809877
##   0.01668101  0.03629745         NaN  0.02809877
##   0.01747528  0.03629745         NaN  0.02809877
##   0.01830738  0.03629745         NaN  0.02809877
##   0.01917910  0.03629745         NaN  0.02809877
##   0.02009233  0.03629745         NaN  0.02809877
##   0.02104904  0.03629745         NaN  0.02809877
##   0.02205131  0.03629745         NaN  0.02809877
##   0.02310130  0.03629745         NaN  0.02809877
##   0.02420128  0.03629745         NaN  0.02809877
##   0.02535364  0.03629745         NaN  0.02809877
##   0.02656088  0.03629745         NaN  0.02809877
##   0.02782559  0.03629745         NaN  0.02809877
##   0.02915053  0.03629745         NaN  0.02809877
##   0.03053856  0.03629745         NaN  0.02809877
##   0.03199267  0.03629745         NaN  0.02809877
##   0.03351603  0.03629745         NaN  0.02809877
##   0.03511192  0.03629745         NaN  0.02809877
##   0.03678380  0.03629745         NaN  0.02809877
##   0.03853529  0.03629745         NaN  0.02809877
##   0.04037017  0.03629745         NaN  0.02809877
##   0.04229243  0.03629745         NaN  0.02809877
##   0.04430621  0.03629745         NaN  0.02809877
##   0.04641589  0.03629745         NaN  0.02809877
##   0.04862602  0.03629745         NaN  0.02809877
##   0.05094138  0.03629745         NaN  0.02809877
##   0.05336699  0.03629745         NaN  0.02809877
##   0.05590810  0.03629745         NaN  0.02809877
##   0.05857021  0.03629745         NaN  0.02809877
##   0.06135907  0.03629745         NaN  0.02809877
##   0.06428073  0.03629745         NaN  0.02809877
##   0.06734151  0.03629745         NaN  0.02809877
##   0.07054802  0.03629745         NaN  0.02809877
##   0.07390722  0.03629745         NaN  0.02809877
##   0.07742637  0.03629745         NaN  0.02809877
##   0.08111308  0.03629745         NaN  0.02809877
##   0.08497534  0.03629745         NaN  0.02809877
##   0.08902151  0.03629745         NaN  0.02809877
##   0.09326033  0.03629745         NaN  0.02809877
##   0.09770100  0.03629745         NaN  0.02809877
##   0.10235310  0.03629745         NaN  0.02809877
##   0.10722672  0.03629745         NaN  0.02809877
##   0.11233240  0.03629745         NaN  0.02809877
##   0.11768120  0.03629745         NaN  0.02809877
##   0.12328467  0.03629745         NaN  0.02809877
##   0.12915497  0.03629745         NaN  0.02809877
##   0.13530478  0.03629745         NaN  0.02809877
##   0.14174742  0.03629745         NaN  0.02809877
##   0.14849683  0.03629745         NaN  0.02809877
##   0.15556761  0.03629745         NaN  0.02809877
##   0.16297508  0.03629745         NaN  0.02809877
##   0.17073526  0.03629745         NaN  0.02809877
##   0.17886495  0.03629745         NaN  0.02809877
##   0.18738174  0.03629745         NaN  0.02809877
##   0.19630407  0.03629745         NaN  0.02809877
##   0.20565123  0.03629745         NaN  0.02809877
##   0.21544347  0.03629745         NaN  0.02809877
##   0.22570197  0.03629745         NaN  0.02809877
##   0.23644894  0.03629745         NaN  0.02809877
##   0.24770764  0.03629745         NaN  0.02809877
##   0.25950242  0.03629745         NaN  0.02809877
##   0.27185882  0.03629745         NaN  0.02809877
##   0.28480359  0.03629745         NaN  0.02809877
##   0.29836472  0.03629745         NaN  0.02809877
##   0.31257158  0.03629745         NaN  0.02809877
##   0.32745492  0.03629745         NaN  0.02809877
##   0.34304693  0.03629745         NaN  0.02809877
##   0.35938137  0.03629745         NaN  0.02809877
##   0.37649358  0.03629745         NaN  0.02809877
##   0.39442061  0.03629745         NaN  0.02809877
##   0.41320124  0.03629745         NaN  0.02809877
##   0.43287613  0.03629745         NaN  0.02809877
##   0.45348785  0.03629745         NaN  0.02809877
##   0.47508102  0.03629745         NaN  0.02809877
##   0.49770236  0.03629745         NaN  0.02809877
##   0.52140083  0.03629745         NaN  0.02809877
##   0.54622772  0.03629745         NaN  0.02809877
##   0.57223677  0.03629745         NaN  0.02809877
##   0.59948425  0.03629745         NaN  0.02809877
##   0.62802914  0.03629745         NaN  0.02809877
##   0.65793322  0.03629745         NaN  0.02809877
##   0.68926121  0.03629745         NaN  0.02809877
##   0.72208090  0.03629745         NaN  0.02809877
##   0.75646333  0.03629745         NaN  0.02809877
##   0.79248290  0.03629745         NaN  0.02809877
##   0.83021757  0.03629745         NaN  0.02809877
##   0.86974900  0.03629745         NaN  0.02809877
##   0.91116276  0.03629745         NaN  0.02809877
##   0.95454846  0.03629745         NaN  0.02809877
##   1.00000000  0.03629745         NaN  0.02809877
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01.

##   alpha lambda
## 1     1   0.01
##     alpha     lambda       RMSE   Rsquared        MAE       RMSESD
## 1       1 0.01000000 0.03569714 0.10943424 0.02766807 0.0008020264
## 2       1 0.01047616 0.03583339 0.10943424 0.02776539 0.0008059748
## 3       1 0.01097499 0.03598234 0.10943424 0.02787192 0.0008102888
## 4       1 0.01149757 0.03614511 0.10943424 0.02798781 0.0008149949
## 5       1 0.01204504 0.03629197 0.08670772 0.02809446 0.0008053357
## 6       1 0.01261857 0.03629745        NaN 0.02809877 0.0008012625
## 7       1 0.01321941 0.03629745        NaN 0.02809877 0.0008012625
## 8       1 0.01384886 0.03629745        NaN 0.02809877 0.0008012625
## 9       1 0.01450829 0.03629745        NaN 0.02809877 0.0008012625
## 10      1 0.01519911 0.03629745        NaN 0.02809877 0.0008012625
## 11      1 0.01592283 0.03629745        NaN 0.02809877 0.0008012625
## 12      1 0.01668101 0.03629745        NaN 0.02809877 0.0008012625
## 13      1 0.01747528 0.03629745        NaN 0.02809877 0.0008012625
## 14      1 0.01830738 0.03629745        NaN 0.02809877 0.0008012625
## 15      1 0.01917910 0.03629745        NaN 0.02809877 0.0008012625
## 16      1 0.02009233 0.03629745        NaN 0.02809877 0.0008012625
## 17      1 0.02104904 0.03629745        NaN 0.02809877 0.0008012625
## 18      1 0.02205131 0.03629745        NaN 0.02809877 0.0008012625
## 19      1 0.02310130 0.03629745        NaN 0.02809877 0.0008012625
## 20      1 0.02420128 0.03629745        NaN 0.02809877 0.0008012625
## 21      1 0.02535364 0.03629745        NaN 0.02809877 0.0008012625
## 22      1 0.02656088 0.03629745        NaN 0.02809877 0.0008012625
## 23      1 0.02782559 0.03629745        NaN 0.02809877 0.0008012625
## 24      1 0.02915053 0.03629745        NaN 0.02809877 0.0008012625
## 25      1 0.03053856 0.03629745        NaN 0.02809877 0.0008012625
## 26      1 0.03199267 0.03629745        NaN 0.02809877 0.0008012625
## 27      1 0.03351603 0.03629745        NaN 0.02809877 0.0008012625
## 28      1 0.03511192 0.03629745        NaN 0.02809877 0.0008012625
## 29      1 0.03678380 0.03629745        NaN 0.02809877 0.0008012625
## 30      1 0.03853529 0.03629745        NaN 0.02809877 0.0008012625
## 31      1 0.04037017 0.03629745        NaN 0.02809877 0.0008012625
## 32      1 0.04229243 0.03629745        NaN 0.02809877 0.0008012625
## 33      1 0.04430621 0.03629745        NaN 0.02809877 0.0008012625
## 34      1 0.04641589 0.03629745        NaN 0.02809877 0.0008012625
## 35      1 0.04862602 0.03629745        NaN 0.02809877 0.0008012625
## 36      1 0.05094138 0.03629745        NaN 0.02809877 0.0008012625
## 37      1 0.05336699 0.03629745        NaN 0.02809877 0.0008012625
## 38      1 0.05590810 0.03629745        NaN 0.02809877 0.0008012625
## 39      1 0.05857021 0.03629745        NaN 0.02809877 0.0008012625
## 40      1 0.06135907 0.03629745        NaN 0.02809877 0.0008012625
## 41      1 0.06428073 0.03629745        NaN 0.02809877 0.0008012625
## 42      1 0.06734151 0.03629745        NaN 0.02809877 0.0008012625
## 43      1 0.07054802 0.03629745        NaN 0.02809877 0.0008012625
## 44      1 0.07390722 0.03629745        NaN 0.02809877 0.0008012625
## 45      1 0.07742637 0.03629745        NaN 0.02809877 0.0008012625
## 46      1 0.08111308 0.03629745        NaN 0.02809877 0.0008012625
## 47      1 0.08497534 0.03629745        NaN 0.02809877 0.0008012625
## 48      1 0.08902151 0.03629745        NaN 0.02809877 0.0008012625
## 49      1 0.09326033 0.03629745        NaN 0.02809877 0.0008012625
## 50      1 0.09770100 0.03629745        NaN 0.02809877 0.0008012625
## 51      1 0.10235310 0.03629745        NaN 0.02809877 0.0008012625
## 52      1 0.10722672 0.03629745        NaN 0.02809877 0.0008012625
## 53      1 0.11233240 0.03629745        NaN 0.02809877 0.0008012625
## 54      1 0.11768120 0.03629745        NaN 0.02809877 0.0008012625
## 55      1 0.12328467 0.03629745        NaN 0.02809877 0.0008012625
## 56      1 0.12915497 0.03629745        NaN 0.02809877 0.0008012625
## 57      1 0.13530478 0.03629745        NaN 0.02809877 0.0008012625
## 58      1 0.14174742 0.03629745        NaN 0.02809877 0.0008012625
## 59      1 0.14849683 0.03629745        NaN 0.02809877 0.0008012625
## 60      1 0.15556761 0.03629745        NaN 0.02809877 0.0008012625
## 61      1 0.16297508 0.03629745        NaN 0.02809877 0.0008012625
## 62      1 0.17073526 0.03629745        NaN 0.02809877 0.0008012625
## 63      1 0.17886495 0.03629745        NaN 0.02809877 0.0008012625
## 64      1 0.18738174 0.03629745        NaN 0.02809877 0.0008012625
## 65      1 0.19630407 0.03629745        NaN 0.02809877 0.0008012625
## 66      1 0.20565123 0.03629745        NaN 0.02809877 0.0008012625
## 67      1 0.21544347 0.03629745        NaN 0.02809877 0.0008012625
## 68      1 0.22570197 0.03629745        NaN 0.02809877 0.0008012625
## 69      1 0.23644894 0.03629745        NaN 0.02809877 0.0008012625
## 70      1 0.24770764 0.03629745        NaN 0.02809877 0.0008012625
## 71      1 0.25950242 0.03629745        NaN 0.02809877 0.0008012625
## 72      1 0.27185882 0.03629745        NaN 0.02809877 0.0008012625
## 73      1 0.28480359 0.03629745        NaN 0.02809877 0.0008012625
## 74      1 0.29836472 0.03629745        NaN 0.02809877 0.0008012625
## 75      1 0.31257158 0.03629745        NaN 0.02809877 0.0008012625
## 76      1 0.32745492 0.03629745        NaN 0.02809877 0.0008012625
## 77      1 0.34304693 0.03629745        NaN 0.02809877 0.0008012625
## 78      1 0.35938137 0.03629745        NaN 0.02809877 0.0008012625
## 79      1 0.37649358 0.03629745        NaN 0.02809877 0.0008012625
## 80      1 0.39442061 0.03629745        NaN 0.02809877 0.0008012625
## 81      1 0.41320124 0.03629745        NaN 0.02809877 0.0008012625
## 82      1 0.43287613 0.03629745        NaN 0.02809877 0.0008012625
## 83      1 0.45348785 0.03629745        NaN 0.02809877 0.0008012625
## 84      1 0.47508102 0.03629745        NaN 0.02809877 0.0008012625
## 85      1 0.49770236 0.03629745        NaN 0.02809877 0.0008012625
## 86      1 0.52140083 0.03629745        NaN 0.02809877 0.0008012625
## 87      1 0.54622772 0.03629745        NaN 0.02809877 0.0008012625
## 88      1 0.57223677 0.03629745        NaN 0.02809877 0.0008012625
## 89      1 0.59948425 0.03629745        NaN 0.02809877 0.0008012625
## 90      1 0.62802914 0.03629745        NaN 0.02809877 0.0008012625
## 91      1 0.65793322 0.03629745        NaN 0.02809877 0.0008012625
## 92      1 0.68926121 0.03629745        NaN 0.02809877 0.0008012625
## 93      1 0.72208090 0.03629745        NaN 0.02809877 0.0008012625
## 94      1 0.75646333 0.03629745        NaN 0.02809877 0.0008012625
## 95      1 0.79248290 0.03629745        NaN 0.02809877 0.0008012625
## 96      1 0.83021757 0.03629745        NaN 0.02809877 0.0008012625
## 97      1 0.86974900 0.03629745        NaN 0.02809877 0.0008012625
## 98      1 0.91116276 0.03629745        NaN 0.02809877 0.0008012625
## 99      1 0.95454846 0.03629745        NaN 0.02809877 0.0008012625
## 100     1 1.00000000 0.03629745        NaN 0.02809877 0.0008012625
##     RsquaredSD        MAESD
## 1   0.01830599 0.0004837323
## 2   0.01830599 0.0004861025
## 3   0.01830599 0.0004883795
## 4   0.01830599 0.0004905843
## 5   0.00516936 0.0004842642
## 6           NA 0.0004803115
## 7           NA 0.0004803115
## 8           NA 0.0004803115
## 9           NA 0.0004803115
## 10          NA 0.0004803115
## 11          NA 0.0004803115
## 12          NA 0.0004803115
## 13          NA 0.0004803115
## 14          NA 0.0004803115
## 15          NA 0.0004803115
## 16          NA 0.0004803115
## 17          NA 0.0004803115
## 18          NA 0.0004803115
## 19          NA 0.0004803115
## 20          NA 0.0004803115
## 21          NA 0.0004803115
## 22          NA 0.0004803115
## 23          NA 0.0004803115
## 24          NA 0.0004803115
## 25          NA 0.0004803115
## 26          NA 0.0004803115
## 27          NA 0.0004803115
## 28          NA 0.0004803115
## 29          NA 0.0004803115
## 30          NA 0.0004803115
## 31          NA 0.0004803115
## 32          NA 0.0004803115
## 33          NA 0.0004803115
## 34          NA 0.0004803115
## 35          NA 0.0004803115
## 36          NA 0.0004803115
## 37          NA 0.0004803115
## 38          NA 0.0004803115
## 39          NA 0.0004803115
## 40          NA 0.0004803115
## 41          NA 0.0004803115
## 42          NA 0.0004803115
## 43          NA 0.0004803115
## 44          NA 0.0004803115
## 45          NA 0.0004803115
## 46          NA 0.0004803115
## 47          NA 0.0004803115
## 48          NA 0.0004803115
## 49          NA 0.0004803115
## 50          NA 0.0004803115
## 51          NA 0.0004803115
## 52          NA 0.0004803115
## 53          NA 0.0004803115
## 54          NA 0.0004803115
## 55          NA 0.0004803115
## 56          NA 0.0004803115
## 57          NA 0.0004803115
## 58          NA 0.0004803115
## 59          NA 0.0004803115
## 60          NA 0.0004803115
## 61          NA 0.0004803115
## 62          NA 0.0004803115
## 63          NA 0.0004803115
## 64          NA 0.0004803115
## 65          NA 0.0004803115
## 66          NA 0.0004803115
## 67          NA 0.0004803115
## 68          NA 0.0004803115
## 69          NA 0.0004803115
## 70          NA 0.0004803115
## 71          NA 0.0004803115
## 72          NA 0.0004803115
## 73          NA 0.0004803115
## 74          NA 0.0004803115
## 75          NA 0.0004803115
## 76          NA 0.0004803115
## 77          NA 0.0004803115
## 78          NA 0.0004803115
## 79          NA 0.0004803115
## 80          NA 0.0004803115
## 81          NA 0.0004803115
## 82          NA 0.0004803115
## 83          NA 0.0004803115
## 84          NA 0.0004803115
## 85          NA 0.0004803115
## 86          NA 0.0004803115
## 87          NA 0.0004803115
## 88          NA 0.0004803115
## 89          NA 0.0004803115
## 90          NA 0.0004803115
## 91          NA 0.0004803115
## 92          NA 0.0004803115
## 93          NA 0.0004803115
## 94          NA 0.0004803115
## 95          NA 0.0004803115
## 96          NA 0.0004803115
## 97          NA 0.0004803115
## 98          NA 0.0004803115
## 99          NA 0.0004803115
## 100         NA 0.0004803115
## Warning: Removed 95 rows containing missing values (geom_path).
## Warning: Removed 95 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the caret-tuned LASSO (trained on the full training set above)
# against the held-out test data via the project helper test.model().
# NOTE(review): `transformation = t` — presumably `t` holds the transformation
# choice set earlier in the script, but `t` is also base R's transpose
# function; confirm a variable named `t` is actually defined upstream.
if (algo.LASSO.caret == TRUE){
  test.model(model.LASSO.caret, data.test
             ,method = 'glmnet',subopt = "LASSO"
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.093   2.095   2.097   2.097   2.099   2.100 
## [1] "glmnet LASSO Test MSE: 0.00120261430064076"

LASSO with CV (w/ filtered train)

Train

# Repeat the caret LASSO tuning, this time on the filtered training set
# (data.train2). Note this overwrites model.LASSO.caret from the
# full-training-set run earlier in the document.
if (algo.LASSO.caret) {
  set.seed(1)  # same seed as the full-train run, for comparable CV folds
  returned <- train.caret.glmselect(
    formula       = formula,
    data          = data.train2,
    method        = "glmnet",
    subopt        = "LASSO",
    feature.names = feature.names
  )
  model.LASSO.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.01 on full training set
## glmnet 
## 
## 5305 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 4774, 4773, 4776, 4773, 4775, 4774, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE        Rsquared   MAE       
##   0.01000000  0.03057666  0.1473540  0.02449647
##   0.01047616  0.03073563  0.1473540  0.02460509
##   0.01097499  0.03090917  0.1473540  0.02472597
##   0.01149757  0.03109853  0.1473540  0.02485989
##   0.01204504  0.03127457  0.1304874  0.02498538
##   0.01261857  0.03127527        NaN  0.02498580
##   0.01321941  0.03127527        NaN  0.02498580
##   0.01384886  0.03127527        NaN  0.02498580
##   0.01450829  0.03127527        NaN  0.02498580
##   0.01519911  0.03127527        NaN  0.02498580
##   0.01592283  0.03127527        NaN  0.02498580
##   0.01668101  0.03127527        NaN  0.02498580
##   0.01747528  0.03127527        NaN  0.02498580
##   0.01830738  0.03127527        NaN  0.02498580
##   0.01917910  0.03127527        NaN  0.02498580
##   0.02009233  0.03127527        NaN  0.02498580
##   0.02104904  0.03127527        NaN  0.02498580
##   0.02205131  0.03127527        NaN  0.02498580
##   0.02310130  0.03127527        NaN  0.02498580
##   0.02420128  0.03127527        NaN  0.02498580
##   0.02535364  0.03127527        NaN  0.02498580
##   0.02656088  0.03127527        NaN  0.02498580
##   0.02782559  0.03127527        NaN  0.02498580
##   0.02915053  0.03127527        NaN  0.02498580
##   0.03053856  0.03127527        NaN  0.02498580
##   0.03199267  0.03127527        NaN  0.02498580
##   0.03351603  0.03127527        NaN  0.02498580
##   0.03511192  0.03127527        NaN  0.02498580
##   0.03678380  0.03127527        NaN  0.02498580
##   0.03853529  0.03127527        NaN  0.02498580
##   0.04037017  0.03127527        NaN  0.02498580
##   0.04229243  0.03127527        NaN  0.02498580
##   0.04430621  0.03127527        NaN  0.02498580
##   0.04641589  0.03127527        NaN  0.02498580
##   0.04862602  0.03127527        NaN  0.02498580
##   0.05094138  0.03127527        NaN  0.02498580
##   0.05336699  0.03127527        NaN  0.02498580
##   0.05590810  0.03127527        NaN  0.02498580
##   0.05857021  0.03127527        NaN  0.02498580
##   0.06135907  0.03127527        NaN  0.02498580
##   0.06428073  0.03127527        NaN  0.02498580
##   0.06734151  0.03127527        NaN  0.02498580
##   0.07054802  0.03127527        NaN  0.02498580
##   0.07390722  0.03127527        NaN  0.02498580
##   0.07742637  0.03127527        NaN  0.02498580
##   0.08111308  0.03127527        NaN  0.02498580
##   0.08497534  0.03127527        NaN  0.02498580
##   0.08902151  0.03127527        NaN  0.02498580
##   0.09326033  0.03127527        NaN  0.02498580
##   0.09770100  0.03127527        NaN  0.02498580
##   0.10235310  0.03127527        NaN  0.02498580
##   0.10722672  0.03127527        NaN  0.02498580
##   0.11233240  0.03127527        NaN  0.02498580
##   0.11768120  0.03127527        NaN  0.02498580
##   0.12328467  0.03127527        NaN  0.02498580
##   0.12915497  0.03127527        NaN  0.02498580
##   0.13530478  0.03127527        NaN  0.02498580
##   0.14174742  0.03127527        NaN  0.02498580
##   0.14849683  0.03127527        NaN  0.02498580
##   0.15556761  0.03127527        NaN  0.02498580
##   0.16297508  0.03127527        NaN  0.02498580
##   0.17073526  0.03127527        NaN  0.02498580
##   0.17886495  0.03127527        NaN  0.02498580
##   0.18738174  0.03127527        NaN  0.02498580
##   0.19630407  0.03127527        NaN  0.02498580
##   0.20565123  0.03127527        NaN  0.02498580
##   0.21544347  0.03127527        NaN  0.02498580
##   0.22570197  0.03127527        NaN  0.02498580
##   0.23644894  0.03127527        NaN  0.02498580
##   0.24770764  0.03127527        NaN  0.02498580
##   0.25950242  0.03127527        NaN  0.02498580
##   0.27185882  0.03127527        NaN  0.02498580
##   0.28480359  0.03127527        NaN  0.02498580
##   0.29836472  0.03127527        NaN  0.02498580
##   0.31257158  0.03127527        NaN  0.02498580
##   0.32745492  0.03127527        NaN  0.02498580
##   0.34304693  0.03127527        NaN  0.02498580
##   0.35938137  0.03127527        NaN  0.02498580
##   0.37649358  0.03127527        NaN  0.02498580
##   0.39442061  0.03127527        NaN  0.02498580
##   0.41320124  0.03127527        NaN  0.02498580
##   0.43287613  0.03127527        NaN  0.02498580
##   0.45348785  0.03127527        NaN  0.02498580
##   0.47508102  0.03127527        NaN  0.02498580
##   0.49770236  0.03127527        NaN  0.02498580
##   0.52140083  0.03127527        NaN  0.02498580
##   0.54622772  0.03127527        NaN  0.02498580
##   0.57223677  0.03127527        NaN  0.02498580
##   0.59948425  0.03127527        NaN  0.02498580
##   0.62802914  0.03127527        NaN  0.02498580
##   0.65793322  0.03127527        NaN  0.02498580
##   0.68926121  0.03127527        NaN  0.02498580
##   0.72208090  0.03127527        NaN  0.02498580
##   0.75646333  0.03127527        NaN  0.02498580
##   0.79248290  0.03127527        NaN  0.02498580
##   0.83021757  0.03127527        NaN  0.02498580
##   0.86974900  0.03127527        NaN  0.02498580
##   0.91116276  0.03127527        NaN  0.02498580
##   0.95454846  0.03127527        NaN  0.02498580
##   1.00000000  0.03127527        NaN  0.02498580
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01.

##   alpha lambda
## 1     1   0.01
##     alpha     lambda       RMSE  Rsquared        MAE       RMSESD
## 1       1 0.01000000 0.03057666 0.1473540 0.02449647 0.0007554009
## 2       1 0.01047616 0.03073563 0.1473540 0.02460509 0.0007560498
## 3       1 0.01097499 0.03090917 0.1473540 0.02472597 0.0007566963
## 4       1 0.01149757 0.03109853 0.1473540 0.02485989 0.0007573547
## 5       1 0.01204504 0.03127457 0.1304874 0.02498538 0.0007465516
## 6       1 0.01261857 0.03127527       NaN 0.02498580 0.0007461210
## 7       1 0.01321941 0.03127527       NaN 0.02498580 0.0007461210
## 8       1 0.01384886 0.03127527       NaN 0.02498580 0.0007461210
## 9       1 0.01450829 0.03127527       NaN 0.02498580 0.0007461210
## 10      1 0.01519911 0.03127527       NaN 0.02498580 0.0007461210
## 11      1 0.01592283 0.03127527       NaN 0.02498580 0.0007461210
## 12      1 0.01668101 0.03127527       NaN 0.02498580 0.0007461210
## 13      1 0.01747528 0.03127527       NaN 0.02498580 0.0007461210
## 14      1 0.01830738 0.03127527       NaN 0.02498580 0.0007461210
## 15      1 0.01917910 0.03127527       NaN 0.02498580 0.0007461210
## 16      1 0.02009233 0.03127527       NaN 0.02498580 0.0007461210
## 17      1 0.02104904 0.03127527       NaN 0.02498580 0.0007461210
## 18      1 0.02205131 0.03127527       NaN 0.02498580 0.0007461210
## 19      1 0.02310130 0.03127527       NaN 0.02498580 0.0007461210
## 20      1 0.02420128 0.03127527       NaN 0.02498580 0.0007461210
## 21      1 0.02535364 0.03127527       NaN 0.02498580 0.0007461210
## 22      1 0.02656088 0.03127527       NaN 0.02498580 0.0007461210
## 23      1 0.02782559 0.03127527       NaN 0.02498580 0.0007461210
## 24      1 0.02915053 0.03127527       NaN 0.02498580 0.0007461210
## 25      1 0.03053856 0.03127527       NaN 0.02498580 0.0007461210
## 26      1 0.03199267 0.03127527       NaN 0.02498580 0.0007461210
## 27      1 0.03351603 0.03127527       NaN 0.02498580 0.0007461210
## 28      1 0.03511192 0.03127527       NaN 0.02498580 0.0007461210
## 29      1 0.03678380 0.03127527       NaN 0.02498580 0.0007461210
## 30      1 0.03853529 0.03127527       NaN 0.02498580 0.0007461210
## 31      1 0.04037017 0.03127527       NaN 0.02498580 0.0007461210
## 32      1 0.04229243 0.03127527       NaN 0.02498580 0.0007461210
## 33      1 0.04430621 0.03127527       NaN 0.02498580 0.0007461210
## 34      1 0.04641589 0.03127527       NaN 0.02498580 0.0007461210
## 35      1 0.04862602 0.03127527       NaN 0.02498580 0.0007461210
## 36      1 0.05094138 0.03127527       NaN 0.02498580 0.0007461210
## 37      1 0.05336699 0.03127527       NaN 0.02498580 0.0007461210
## 38      1 0.05590810 0.03127527       NaN 0.02498580 0.0007461210
## 39      1 0.05857021 0.03127527       NaN 0.02498580 0.0007461210
## 40      1 0.06135907 0.03127527       NaN 0.02498580 0.0007461210
## 41      1 0.06428073 0.03127527       NaN 0.02498580 0.0007461210
## 42      1 0.06734151 0.03127527       NaN 0.02498580 0.0007461210
## 43      1 0.07054802 0.03127527       NaN 0.02498580 0.0007461210
## 44      1 0.07390722 0.03127527       NaN 0.02498580 0.0007461210
## 45      1 0.07742637 0.03127527       NaN 0.02498580 0.0007461210
## 46      1 0.08111308 0.03127527       NaN 0.02498580 0.0007461210
## 47      1 0.08497534 0.03127527       NaN 0.02498580 0.0007461210
## 48      1 0.08902151 0.03127527       NaN 0.02498580 0.0007461210
## 49      1 0.09326033 0.03127527       NaN 0.02498580 0.0007461210
## 50      1 0.09770100 0.03127527       NaN 0.02498580 0.0007461210
## 51      1 0.10235310 0.03127527       NaN 0.02498580 0.0007461210
## 52      1 0.10722672 0.03127527       NaN 0.02498580 0.0007461210
## 53      1 0.11233240 0.03127527       NaN 0.02498580 0.0007461210
## 54      1 0.11768120 0.03127527       NaN 0.02498580 0.0007461210
## 55      1 0.12328467 0.03127527       NaN 0.02498580 0.0007461210
## 56      1 0.12915497 0.03127527       NaN 0.02498580 0.0007461210
## 57      1 0.13530478 0.03127527       NaN 0.02498580 0.0007461210
## 58      1 0.14174742 0.03127527       NaN 0.02498580 0.0007461210
## 59      1 0.14849683 0.03127527       NaN 0.02498580 0.0007461210
## 60      1 0.15556761 0.03127527       NaN 0.02498580 0.0007461210
## 61      1 0.16297508 0.03127527       NaN 0.02498580 0.0007461210
## 62      1 0.17073526 0.03127527       NaN 0.02498580 0.0007461210
## 63      1 0.17886495 0.03127527       NaN 0.02498580 0.0007461210
## 64      1 0.18738174 0.03127527       NaN 0.02498580 0.0007461210
## 65      1 0.19630407 0.03127527       NaN 0.02498580 0.0007461210
## 66      1 0.20565123 0.03127527       NaN 0.02498580 0.0007461210
## 67      1 0.21544347 0.03127527       NaN 0.02498580 0.0007461210
## 68      1 0.22570197 0.03127527       NaN 0.02498580 0.0007461210
## 69      1 0.23644894 0.03127527       NaN 0.02498580 0.0007461210
## 70      1 0.24770764 0.03127527       NaN 0.02498580 0.0007461210
## 71      1 0.25950242 0.03127527       NaN 0.02498580 0.0007461210
## 72      1 0.27185882 0.03127527       NaN 0.02498580 0.0007461210
## 73      1 0.28480359 0.03127527       NaN 0.02498580 0.0007461210
## 74      1 0.29836472 0.03127527       NaN 0.02498580 0.0007461210
## 75      1 0.31257158 0.03127527       NaN 0.02498580 0.0007461210
## 76      1 0.32745492 0.03127527       NaN 0.02498580 0.0007461210
## 77      1 0.34304693 0.03127527       NaN 0.02498580 0.0007461210
## 78      1 0.35938137 0.03127527       NaN 0.02498580 0.0007461210
## 79      1 0.37649358 0.03127527       NaN 0.02498580 0.0007461210
## 80      1 0.39442061 0.03127527       NaN 0.02498580 0.0007461210
## 81      1 0.41320124 0.03127527       NaN 0.02498580 0.0007461210
## 82      1 0.43287613 0.03127527       NaN 0.02498580 0.0007461210
## 83      1 0.45348785 0.03127527       NaN 0.02498580 0.0007461210
## 84      1 0.47508102 0.03127527       NaN 0.02498580 0.0007461210
## 85      1 0.49770236 0.03127527       NaN 0.02498580 0.0007461210
## 86      1 0.52140083 0.03127527       NaN 0.02498580 0.0007461210
## 87      1 0.54622772 0.03127527       NaN 0.02498580 0.0007461210
## 88      1 0.57223677 0.03127527       NaN 0.02498580 0.0007461210
## 89      1 0.59948425 0.03127527       NaN 0.02498580 0.0007461210
## 90      1 0.62802914 0.03127527       NaN 0.02498580 0.0007461210
## 91      1 0.65793322 0.03127527       NaN 0.02498580 0.0007461210
## 92      1 0.68926121 0.03127527       NaN 0.02498580 0.0007461210
## 93      1 0.72208090 0.03127527       NaN 0.02498580 0.0007461210
## 94      1 0.75646333 0.03127527       NaN 0.02498580 0.0007461210
## 95      1 0.79248290 0.03127527       NaN 0.02498580 0.0007461210
## 96      1 0.83021757 0.03127527       NaN 0.02498580 0.0007461210
## 97      1 0.86974900 0.03127527       NaN 0.02498580 0.0007461210
## 98      1 0.91116276 0.03127527       NaN 0.02498580 0.0007461210
## 99      1 0.95454846 0.03127527       NaN 0.02498580 0.0007461210
## 100     1 1.00000000 0.03127527       NaN 0.02498580 0.0007461210
##      RsquaredSD        MAESD
## 1   0.009987013 0.0005847018
## 2   0.009987013 0.0005885215
## 3   0.009987013 0.0005944241
## 4   0.009987013 0.0005997731
## 5            NA 0.0005984154
## 6            NA 0.0005983992
## 7            NA 0.0005983992
## 8            NA 0.0005983992
## 9            NA 0.0005983992
## 10           NA 0.0005983992
## 11           NA 0.0005983992
## 12           NA 0.0005983992
## 13           NA 0.0005983992
## 14           NA 0.0005983992
## 15           NA 0.0005983992
## 16           NA 0.0005983992
## 17           NA 0.0005983992
## 18           NA 0.0005983992
## 19           NA 0.0005983992
## 20           NA 0.0005983992
## 21           NA 0.0005983992
## 22           NA 0.0005983992
## 23           NA 0.0005983992
## 24           NA 0.0005983992
## 25           NA 0.0005983992
## 26           NA 0.0005983992
## 27           NA 0.0005983992
## 28           NA 0.0005983992
## 29           NA 0.0005983992
## 30           NA 0.0005983992
## 31           NA 0.0005983992
## 32           NA 0.0005983992
## 33           NA 0.0005983992
## 34           NA 0.0005983992
## 35           NA 0.0005983992
## 36           NA 0.0005983992
## 37           NA 0.0005983992
## 38           NA 0.0005983992
## 39           NA 0.0005983992
## 40           NA 0.0005983992
## 41           NA 0.0005983992
## 42           NA 0.0005983992
## 43           NA 0.0005983992
## 44           NA 0.0005983992
## 45           NA 0.0005983992
## 46           NA 0.0005983992
## 47           NA 0.0005983992
## 48           NA 0.0005983992
## 49           NA 0.0005983992
## 50           NA 0.0005983992
## 51           NA 0.0005983992
## 52           NA 0.0005983992
## 53           NA 0.0005983992
## 54           NA 0.0005983992
## 55           NA 0.0005983992
## 56           NA 0.0005983992
## 57           NA 0.0005983992
## 58           NA 0.0005983992
## 59           NA 0.0005983992
## 60           NA 0.0005983992
## 61           NA 0.0005983992
## 62           NA 0.0005983992
## 63           NA 0.0005983992
## 64           NA 0.0005983992
## 65           NA 0.0005983992
## 66           NA 0.0005983992
## 67           NA 0.0005983992
## 68           NA 0.0005983992
## 69           NA 0.0005983992
## 70           NA 0.0005983992
## 71           NA 0.0005983992
## 72           NA 0.0005983992
## 73           NA 0.0005983992
## 74           NA 0.0005983992
## 75           NA 0.0005983992
## 76           NA 0.0005983992
## 77           NA 0.0005983992
## 78           NA 0.0005983992
## 79           NA 0.0005983992
## 80           NA 0.0005983992
## 81           NA 0.0005983992
## 82           NA 0.0005983992
## 83           NA 0.0005983992
## 84           NA 0.0005983992
## 85           NA 0.0005983992
## 86           NA 0.0005983992
## 87           NA 0.0005983992
## 88           NA 0.0005983992
## 89           NA 0.0005983992
## 90           NA 0.0005983992
## 91           NA 0.0005983992
## 92           NA 0.0005983992
## 93           NA 0.0005983992
## 94           NA 0.0005983992
## 95           NA 0.0005983992
## 96           NA 0.0005983992
## 97           NA 0.0005983992
## 98           NA 0.0005983992
## 99           NA 0.0005983992
## 100          NA 0.0005983992
## Warning: Removed 95 rows containing missing values (geom_path).
## Warning: Removed 95 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the caret-tuned LASSO (glmnet) model on the held-out test set,
# reporting a summary of predictions and the test MSE.
# NOTE(review): `transformation = t` presumably refers to a transformation flag
# set earlier in the document — confirm it is not base::t (matrix transpose).
if (algo.LASSO.caret == TRUE) {
  test.model(model.LASSO.caret,
             data.test,
             method = "glmnet",
             subopt = "LASSO",
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             draw.limits = TRUE,
             transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.089   2.092   2.094   2.093   2.095   2.096 
## [1] "glmnet LASSO Test MSE: 0.00120434242040802"

LARS with cross-validation (using the full training set)

Train

# Tune a LARS model with caret (10-fold CV) on the full training set and keep
# the fitted model for the test step below.
# NOTE(review): subopt is the *string* "NULL" here, while the matching test
# chunk passes the literal NULL — confirm train.caret.glmselect expects a
# character value; if not, this silently labels the run rather than disabling
# the sub-option.
if (algo.LARS.caret == TRUE) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "lars",
                                    subopt = "NULL",
                                    feature.names = feature.names)
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.374 on full training set
## Least Angle Regression 
## 
## 5584 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5026, 5026, 5026, 5025, 5025, 5026, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE        Rsquared   MAE       
##   0.00000000  0.03629745        NaN  0.02809877
##   0.01010101  0.03586885  0.1094342  0.02779220
##   0.02020202  0.03548798  0.1094342  0.02751947
##   0.03030303  0.03515639  0.1094342  0.02728546
##   0.04040404  0.03486999  0.1235891  0.02707803
##   0.05050505  0.03460818  0.1354511  0.02689061
##   0.06060606  0.03437580  0.1465372  0.02671626
##   0.07070707  0.03415631  0.1581287  0.02654614
##   0.08080808  0.03394425  0.1690536  0.02637688
##   0.09090909  0.03374924  0.1781586  0.02621832
##   0.10101010  0.03356105  0.1869373  0.02606691
##   0.11111111  0.03338166  0.1944101  0.02592010
##   0.12121212  0.03321265  0.2005118  0.02577829
##   0.13131313  0.03305416  0.2054703  0.02564350
##   0.14141414  0.03290636  0.2094841  0.02551467
##   0.15151515  0.03276939  0.2127219  0.02539051
##   0.16161616  0.03264351  0.2153248  0.02527386
##   0.17171717  0.03253200  0.2174057  0.02516741
##   0.18181818  0.03243172  0.2193077  0.02506688
##   0.19191919  0.03233858  0.2213971  0.02497376
##   0.20202020  0.03225884  0.2232785  0.02489537
##   0.21212121  0.03218721  0.2251247  0.02482594
##   0.22222222  0.03212095  0.2269384  0.02476089
##   0.23232323  0.03206354  0.2284532  0.02470243
##   0.24242424  0.03201208  0.2298872  0.02464916
##   0.25252525  0.03196685  0.2311836  0.02460177
##   0.26262626  0.03192938  0.2322491  0.02456341
##   0.27272727  0.03190137  0.2329706  0.02453427
##   0.28282828  0.03187953  0.2334863  0.02451206
##   0.29292929  0.03186452  0.2337611  0.02449613
##   0.30303030  0.03185158  0.2340218  0.02448148
##   0.31313131  0.03184130  0.2342056  0.02447029
##   0.32323232  0.03183194  0.2344093  0.02446084
##   0.33333333  0.03182435  0.2345535  0.02445224
##   0.34343434  0.03181852  0.2346333  0.02444534
##   0.35353535  0.03181432  0.2346510  0.02444014
##   0.36363636  0.03181185  0.2345991  0.02443652
##   0.37373737  0.03181109  0.2344716  0.02443414
##   0.38383838  0.03181118  0.2343179  0.02443252
##   0.39393939  0.03181251  0.2341215  0.02443219
##   0.40404040  0.03181494  0.2338808  0.02443271
##   0.41414141  0.03181849  0.2335976  0.02443405
##   0.42424242  0.03182206  0.2333257  0.02443603
##   0.43434343  0.03182538  0.2330760  0.02443818
##   0.44444444  0.03182867  0.2328341  0.02444003
##   0.45454545  0.03183213  0.2325913  0.02444210
##   0.46464646  0.03183583  0.2323455  0.02444466
##   0.47474747  0.03183901  0.2321309  0.02444698
##   0.48484848  0.03184225  0.2319211  0.02444954
##   0.49494949  0.03184569  0.2317092  0.02445200
##   0.50505051  0.03184962  0.2314814  0.02445464
##   0.51515152  0.03185438  0.2312198  0.02445786
##   0.52525253  0.03185913  0.2309657  0.02446136
##   0.53535354  0.03186411  0.2307064  0.02446505
##   0.54545455  0.03186973  0.2304218  0.02446932
##   0.55555556  0.03187577  0.2301237  0.02447366
##   0.56565657  0.03188215  0.2298154  0.02447800
##   0.57575758  0.03188855  0.2295133  0.02448236
##   0.58585859  0.03189529  0.2291996  0.02448731
##   0.59595960  0.03190251  0.2288680  0.02449251
##   0.60606061  0.03191008  0.2285234  0.02449798
##   0.61616162  0.03191799  0.2281675  0.02450357
##   0.62626263  0.03192609  0.2278071  0.02450900
##   0.63636364  0.03193455  0.2274341  0.02451448
##   0.64646465  0.03194321  0.2270565  0.02452006
##   0.65656566  0.03195218  0.2266686  0.02452588
##   0.66666667  0.03196140  0.2262727  0.02453178
##   0.67676768  0.03197076  0.2258758  0.02453787
##   0.68686869  0.03198053  0.2254656  0.02454431
##   0.69696970  0.03199087  0.2250330  0.02455122
##   0.70707071  0.03200167  0.2245836  0.02455857
##   0.71717172  0.03201280  0.2241226  0.02456617
##   0.72727273  0.03202409  0.2236594  0.02457418
##   0.73737374  0.03203544  0.2231993  0.02458243
##   0.74747475  0.03204670  0.2227480  0.02459045
##   0.75757576  0.03205791  0.2223023  0.02459849
##   0.76767677  0.03206942  0.2218484  0.02460663
##   0.77777778  0.03208118  0.2213884  0.02461498
##   0.78787879  0.03209296  0.2209334  0.02462320
##   0.79797980  0.03210495  0.2204724  0.02463150
##   0.80808081  0.03211733  0.2199993  0.02463995
##   0.81818182  0.03212987  0.2195241  0.02464851
##   0.82828283  0.03214258  0.2190455  0.02465714
##   0.83838384  0.03215548  0.2185626  0.02466588
##   0.84848485  0.03216866  0.2180720  0.02467491
##   0.85858586  0.03218217  0.2175718  0.02468418
##   0.86868687  0.03219597  0.2170632  0.02469357
##   0.87878788  0.03220985  0.2165547  0.02470305
##   0.88888889  0.03222384  0.2160460  0.02471250
##   0.89898990  0.03223801  0.2155340  0.02472217
##   0.90909091  0.03225227  0.2150244  0.02473185
##   0.91919192  0.03226647  0.2145217  0.02474144
##   0.92929293  0.03228075  0.2140202  0.02475100
##   0.93939394  0.03229517  0.2135171  0.02476078
##   0.94949495  0.03230969  0.2130145  0.02477079
##   0.95959596  0.03232425  0.2125162  0.02478082
##   0.96969697  0.03233899  0.2120152  0.02479100
##   0.97979798  0.03235380  0.2115158  0.02480121
##   0.98989899  0.03236860  0.2110219  0.02481132
##   1.00000000  0.03238358  0.2105244  0.02482162
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.3737374.

##     fraction
## 38 0.3737374
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the CV-tuned LARS model (trained on the full training set) on the
# held-out test set, reporting a prediction summary and the test MSE.
if (algo.LARS.caret == TRUE) {
  test.model(model.LARS.caret,
             data.test,
             method = "lars",
             subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             draw.limits = TRUE,
             transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.051   2.085   2.097   2.097   2.109   2.140 
## [1] "lars  Test MSE: 0.000969560263964122"

LARS with cross-validation (using the filtered training set)

Train

# Re-tune the LARS model with caret (10-fold CV), this time on the *filtered*
# training set (data.train2), overwriting model.LARS.caret for the test below.
# NOTE(review): subopt is the *string* "NULL" here, while the matching test
# chunk passes the literal NULL — confirm train.caret.glmselect expects a
# character value.
if (algo.LARS.caret == TRUE) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train2,
                                    method = "lars",
                                    subopt = "NULL",
                                    feature.names = feature.names)
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.566 on full training set
## Least Angle Regression 
## 
## 5305 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 4774, 4773, 4776, 4773, 4775, 4774, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE        Rsquared   MAE       
##   0.00000000  0.03127527        NaN  0.02498580
##   0.01010101  0.03078087  0.1473540  0.02463569
##   0.02020202  0.03033942  0.1473540  0.02433498
##   0.03030303  0.02995563  0.1543020  0.02407136
##   0.04040404  0.02960497  0.1792700  0.02382912
##   0.05050505  0.02928118  0.1940872  0.02361134
##   0.06060606  0.02898961  0.2036037  0.02341797
##   0.07070707  0.02872617  0.2171915  0.02322933
##   0.08080808  0.02846969  0.2321532  0.02303775
##   0.09090909  0.02822871  0.2441789  0.02285574
##   0.10101010  0.02800510  0.2552666  0.02268621
##   0.11111111  0.02778800  0.2655643  0.02252114
##   0.12121212  0.02758279  0.2739761  0.02236573
##   0.13131313  0.02738973  0.2808112  0.02221965
##   0.14141414  0.02720908  0.2863411  0.02208039
##   0.15151515  0.02704109  0.2907973  0.02194989
##   0.16161616  0.02688681  0.2943683  0.02182737
##   0.17171717  0.02674989  0.2975673  0.02171617
##   0.18181818  0.02662116  0.3008109  0.02160881
##   0.19191919  0.02650213  0.3038024  0.02150744
##   0.20202020  0.02639700  0.3064779  0.02141640
##   0.21212121  0.02630278  0.3093857  0.02133762
##   0.22222222  0.02621510  0.3122304  0.02126629
##   0.23232323  0.02613338  0.3149688  0.02119944
##   0.24242424  0.02605869  0.3174243  0.02113882
##   0.25252525  0.02599462  0.3194295  0.02108864
##   0.26262626  0.02594067  0.3210415  0.02104617
##   0.27272727  0.02589471  0.3224635  0.02100701
##   0.28282828  0.02585686  0.3236075  0.02097549
##   0.29292929  0.02582514  0.3245400  0.02094856
##   0.30303030  0.02579668  0.3254744  0.02092520
##   0.31313131  0.02577050  0.3263988  0.02090489
##   0.32323232  0.02574630  0.3273082  0.02088716
##   0.33333333  0.02572476  0.3281301  0.02087186
##   0.34343434  0.02570457  0.3289198  0.02085783
##   0.35353535  0.02568656  0.3296220  0.02084588
##   0.36363636  0.02567104  0.3302116  0.02083567
##   0.37373737  0.02565757  0.3307027  0.02082631
##   0.38383838  0.02564543  0.3311374  0.02081714
##   0.39393939  0.02563393  0.3315520  0.02080794
##   0.40404040  0.02562313  0.3319441  0.02079958
##   0.41414141  0.02561291  0.3323173  0.02079190
##   0.42424242  0.02560348  0.3326592  0.02078548
##   0.43434343  0.02559477  0.3329733  0.02077955
##   0.44444444  0.02558661  0.3332700  0.02077383
##   0.45454545  0.02557909  0.3335416  0.02076807
##   0.46464646  0.02557206  0.3337960  0.02076275
##   0.47474747  0.02556596  0.3340056  0.02075801
##   0.48484848  0.02555978  0.3342280  0.02075350
##   0.49494949  0.02555372  0.3344538  0.02074919
##   0.50505051  0.02554857  0.3346375  0.02074505
##   0.51515152  0.02554440  0.3347741  0.02074162
##   0.52525253  0.02554083  0.3348861  0.02073853
##   0.53535354  0.02553776  0.3349784  0.02073554
##   0.54545455  0.02553577  0.3350188  0.02073350
##   0.55555556  0.02553432  0.3350368  0.02073199
##   0.56565657  0.02553358  0.3350233  0.02073086
##   0.57575758  0.02553373  0.3349671  0.02073032
##   0.58585859  0.02553464  0.3348775  0.02073020
##   0.59595960  0.02553585  0.3347768  0.02073033
##   0.60606061  0.02553743  0.3346634  0.02073090
##   0.61616162  0.02553943  0.3345333  0.02073175
##   0.62626263  0.02554201  0.3343776  0.02073355
##   0.63636364  0.02554511  0.3341993  0.02073599
##   0.64646465  0.02554840  0.3340172  0.02073847
##   0.65656566  0.02555211  0.3338181  0.02074151
##   0.66666667  0.02555652  0.3335883  0.02074494
##   0.67676768  0.02556124  0.3333468  0.02074907
##   0.68686869  0.02556623  0.3330965  0.02075328
##   0.69696970  0.02557169  0.3328277  0.02075771
##   0.70707071  0.02557761  0.3325407  0.02076223
##   0.71717172  0.02558403  0.3322330  0.02076720
##   0.72727273  0.02559084  0.3319098  0.02077232
##   0.73737374  0.02559779  0.3315849  0.02077758
##   0.74747475  0.02560498  0.3312519  0.02078302
##   0.75757576  0.02561270  0.3308973  0.02078865
##   0.76767677  0.02562068  0.3305349  0.02079490
##   0.77777778  0.02562878  0.3301713  0.02080136
##   0.78787879  0.02563736  0.3297892  0.02080811
##   0.79797980  0.02564614  0.3294015  0.02081504
##   0.80808081  0.02565495  0.3290183  0.02082180
##   0.81818182  0.02566406  0.3286251  0.02082888
##   0.82828283  0.02567357  0.3282172  0.02083613
##   0.83838384  0.02568322  0.3278076  0.02084350
##   0.84848485  0.02569318  0.3273876  0.02085120
##   0.85858586  0.02570353  0.3269536  0.02085894
##   0.86868687  0.02571412  0.3265122  0.02086688
##   0.87878788  0.02572506  0.3260585  0.02087506
##   0.88888889  0.02573627  0.3255958  0.02088363
##   0.89898990  0.02574772  0.3251267  0.02089242
##   0.90909091  0.02575953  0.3246454  0.02090164
##   0.91919192  0.02577137  0.3241663  0.02091092
##   0.92929293  0.02578345  0.3236804  0.02092050
##   0.93939394  0.02579567  0.3231918  0.02093009
##   0.94949495  0.02580799  0.3227003  0.02093978
##   0.95959596  0.02582058  0.3222012  0.02094982
##   0.96969697  0.02583358  0.3216869  0.02096027
##   0.97979798  0.02584694  0.3211595  0.02097103
##   0.98989899  0.02586062  0.3206220  0.02098212
##   1.00000000  0.02587453  0.3200784  0.02099330
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.5656566.

##     fraction
## 57 0.5656566
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the CV-tuned LARS model (trained on the filtered training set) on
# the held-out test set, reporting a prediction summary and the test MSE.
if (algo.LARS.caret == TRUE) {
  test.model(model.LARS.caret,
             data.test,
             method = "lars",
             subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             draw.limits = TRUE,
             transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.043   2.081   2.094   2.093   2.105   2.145 
## [1] "lars  Test MSE: 0.000981662348879513"